From 2f2e0dab0420692a07dd096b012ebec3667166f2 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 13 Sep 2023 01:45:10 +0200 Subject: Try pi4 build env again. Shit still not working ... --- doc/note/qemu/php-dev-server.txt | 6 ++++++ doc/note/qemu/qemu.txt | 39 +++++++++++++++++++++++++++------------ 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/doc/note/qemu/php-dev-server.txt b/doc/note/qemu/php-dev-server.txt index 98c5700..c1a8b01 100644 --- a/doc/note/qemu/php-dev-server.txt +++ b/doc/note/qemu/php-dev-server.txt @@ -33,6 +33,7 @@ true `# Setup mounts & persistence (host)` \ && true true `# Run dev server (guest)` \ + && cd "${GUESTWD:?}" \ && DEBUG=1 php -dassert.bail=1 -dzend.assertions=1 -dassert.exception=1 -S 0.0.0.0:8080 src/index.php \ && true @@ -40,3 +41,8 @@ true `# Copy persistence from vm back to host (host)` \ && $SSH -- sh -c "true && cd \"${GUESTWD:?}\" && tar c \"${CPY_OUT:?}\"" | tar x \ && true + +## Links + +- [Install old php](https://tecadmin.net/how-to-install-php-on-debian-12/) + diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index de7a71b..657faa9 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -7,10 +7,17 @@ Qemu apt install qemu-system-aarch64 qemu-efi-aarch64 -## Create Image +## Manage Images +### Create new image qemu-img create -f qcow2 my_disk.qcow2 16G +### Convert qcow2 to raw + qemu-img convert -f qcow2 -O raw foo.qcow2 foo.img + +### Convert raw to qcow2 + qemu-img convert -f raw -O qcow2 foo.img foo.qcow2 + ## Shrink img @@ -47,19 +54,26 @@ Regular boot ## aarch64 (not working yet) - MAC='00:de:ad:de:ad:00' - DISK=my_disk.qcow2 - cp /usr/share/AAVMF/AAVMF_CODE.fd ./flash1.img - qemu-system-aarch64 -m 1G -cpu cortex-a57 -M virt \ - -pflash /usr/share/AAVMF/AAVMF_CODE.fd \ - -pflash flash1.img \ - -drive if=none,file=${DISK:?},id=hd0 \ - -device virtio-blk-device,drive=hd0 \ - -device virtio-net-device,netdev=net0,mac=${MAC:?} + apt install -y --no-install-recommends qemu-uefi-aarch64 + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/raw/master/kernel-qemu-4.4.34-jessie + curl -sSLO https://downloads.raspberrypi.org/raspios_lite_arm64/images/raspios_lite_arm64-2023-05-03/2023-05-03-raspios-bullseye-arm64-lite.img.xz + xz -d 2023-05-03-raspios-bullseye-arm64-lite.img.xz + qemu-img convert -f raw -O qcow2 2023-05-03-raspios-bullseye-arm64-lite.img raspbian-bullseye-lite.qcow2 + qemu-img resize raspbian-bullseye-lite.qcow2 16G + mv raspbian-bullseye-lite.qcow2 hda.qcow2 - qemu-system-aarch64 -M virt -hda my_disk.qcow2 -cdrom debian.iso -boot c -m 1G + qemu-system-aarch64 \ + -bios /usr/share/qemu-efi-aarch64/QEMU_EFI.fd \ + -hda hda.qcow2 \ + -cpu cortex-a72 -m 256 \ + -M versatilepb \ + -no-reboot \ + -serial stdio \ + -net nic -net user \ - qemu-system-aarch64 -M virt -cpu cortex-a57 -m 1G -bios /usr/share/qemu-efi-aarch64/QEMU_EFI.fd -hda my_disk.qcow2 -cdrom ~/images/debian-12.0.0-arm64-DVD/debian-12.0.0-arm64-DVD-1.iso + -cpu arm1176 -m 256 \ + -net tap,ifname=vnet0,script=no,downscript=no \ + -bios /usr/share/ovmf/OVMF.fd \ ## Shared host directory via CIFS/SMB @@ -179,4 +193,5 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever. 
- [USB pass-through](https://unix.stackexchange.com/a/452946/292722) - [qemu monitor via telnet](https://unix.stackexchange.com/a/426951/292722) - [qemu monitor via stdio](https://unix.stackexchange.com/a/57835/292722) +- [qemu raspberry pi TODO](https://blog.agchapman.com/using-qemu-to-emulate-a-raspberry-pi/) -- cgit v1.1 From fa6e23f786f7ce59fd00f80c1202821978f4986d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 21 Sep 2023 17:44:24 +0200 Subject: Add some stats just for fun --- README.txt | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/README.txt b/README.txt index 8064101..ced31c3 100644 --- a/README.txt +++ b/README.txt @@ -7,3 +7,23 @@ Just some random garbage which was handy in some way somewhen. Not yet migrated scripts see "C:/Users/fankhauseand/OneDrive - POSTCHAG/doc" + +## Stats For Nerds + +github.com/AlDanial/cloc v 1.81 T=0.53 s (84.3 files/s, 11729.9 lines/s) +------------------------------------------------------------------------------- +Language files blank comment code +------------------------------------------------------------------------------- +Lua 11 238 286 2259 +JavaScript 6 165 50 1069 +C 3 146 40 759 +Java 19 158 242 570 +Bourne Shell 1 13 7 104 +XML 2 9 22 41 +Markdown 1 18 0 35 +JSON 1 0 0 18 +C/C++ Header 1 4 0 12 +------------------------------------------------------------------------------- +SUM: 45 751 647 4867 +------------------------------------------------------------------------------- + -- cgit v1.1 From d8ba8f6616d0b9d4581596af3f9a678c38fc1266 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 22 Sep 2023 11:57:20 +0200 Subject: (bkup) Add some excludes (and sort them) --- src/main/shell/BackupByRsync/backup.sh | 46 ++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh index 40189c2..938b807 100755 --- a/src/main/shell/BackupByRsync/backup.sh +++ b/src/main/shell/BackupByRsync/backup.sh @@ -53,57 +53,61 @@ run () { rsync --archive --verbose \ --link-dest "${DIR_TO}/latest/${DST_PREFIX:?}" \ --filter=':- .gitignore' \ + --exclude=".git/branches" \ --exclude=".git/COMMIT_EDITMSG" \ --exclude=".git/FETCH_HEAD" \ - --exclude=".git/ORIG_HEAD" \ - --exclude=".git/branches" \ --exclude=".git/hooks/*.sample" \ --exclude=".git/index" \ --exclude=".git/info" \ --exclude=".git/logs" \ --exclude=".git/objects" \ + --exclude=".git/ORIG_HEAD" \ --exclude=".git/packed-refs" \ --exclude=".git/refs/remotes" \ --exclude=".git/refs/tags" \ --exclude=".idea" \ - --exclude="/.git-credentials" \ - --exclude="/.NERDTreeBookmarks" \ - --exclude="/.Xauthority" \ + --exclude="/.android" \ --exclude="/.bash_history" \ + --exclude="/.cache" \ + --exclude="/.config/chromium" \ + --exclude="/.config/GIMP" \ + --exclude="/.config/inkscape" \ + --exclude="/.config/JetBrains" \ + --exclude="/.config/libreoffice" \ + --exclude="/.config/VirtualBox/compreg.dat" \ --exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.leases*" \ --exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.log*" \ - --exclude="/.config/VirtualBox/VBoxSVC.log*" \ - --exclude="/.config/VirtualBox/compreg.dat" \ --exclude="/.config/VirtualBox/selectorwindow.log*" \ --exclude="/.config/VirtualBox/vbox-ssl-cacertificate.crt" \ + --exclude="/.config/VirtualBox/VBoxSVC.log*" \ --exclude="/.config/VirtualBox/xpti.dat" \ - --exclude="/.config/libreoffice" \ - --exclude="/.config/GIMP" 
\ - --exclude="/.config/JetBrains" \ --exclude="/.gdb_history" \ + --exclude="/.git-credentials" \ + --exclude="/.gmrun_history" \ --exclude="/.lesshst" \ - --exclude="/.xsession-errors" \ - --exclude="/.xsession-errors.old" \ - --exclude="/mnt" \ - --exclude="/.android" \ - --exclude="/.cache" \ - --exclude="/.config/chromium" \ - --exclude="/.config/inkscape" \ --exclude="/.local/share" \ --exclude="/.m2/repository" \ --exclude="/.mozilla/firefox" \ + --exclude="/.NERDTreeBookmarks" \ + --exclude="/.recently-used" \ + --exclude="/.sqlite_history" \ --exclude="/.squirrel-sql" \ --exclude="/.viking-maps" \ - --exclude="/Downloads" \ + --exclude="/.viminfo" \ + --exclude="/.Xauthority" \ + --exclude="/.xsession-errors" \ + --exclude="/.xsession-errors.old" \ --exclude="/crashdumps" \ + --exclude="/Downloads" \ --exclude="/images" \ + --exclude="/mnt" \ --exclude="/projects/forks" \ - --exclude="cee-misc-lib/external" \ - --exclude="cee-misc-lib/tmp" \ --exclude="/tmp" \ --exclude="/virtualbox-*" \ - --exclude="/vmshare" \ --exclude="/vm-qemu" \ + --exclude="/vmshare" \ + --exclude="cee-misc-lib/external" \ + --exclude="cee-misc-lib/tmp" \ "${DIR_FROM:?}" \ "${BACKUP_PATH:?}/${DST_PREFIX}" \ ; -- cgit v1.1 From b7227a36cc0a825a5003085a1726dcd7407b5743 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 27 Sep 2023 23:12:33 +0200 Subject: Add notes about gpg. Add another java stream util. --- doc/note/bash/bash-on-windoof.txt | 6 ++++++ doc/note/gpg/gpg-windoof.txt | 5 +++++ .../unspecifiedgarbage/stream/StreamUtils.java | 15 +++++++++++++++ 3 files changed, 26 insertions(+) create mode 100644 doc/note/bash/bash-on-windoof.txt create mode 100644 doc/note/gpg/gpg-windoof.txt diff --git a/doc/note/bash/bash-on-windoof.txt b/doc/note/bash/bash-on-windoof.txt new file mode 100644 index 0000000..32c0ee3 --- /dev/null +++ b/doc/note/bash/bash-on-windoof.txt @@ -0,0 +1,6 @@ + + +## Stop silly path replacements + + MSYS_NO_PATHCONV=1 ssh foo -- ls /var/lib + diff --git a/doc/note/gpg/gpg-windoof.txt b/doc/note/gpg/gpg-windoof.txt new file mode 100644 index 0000000..e883cbf --- /dev/null +++ b/doc/note/gpg/gpg-windoof.txt @@ -0,0 +1,5 @@ + + +[Why does git complain that no GPG agent is running?](https://superuser.com/a/1663941/1123359) + + diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java index 889b3f1..bebe970 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java @@ -25,6 +25,21 @@ public class StreamUtils { return totalBytes; } + public static Runnable newCopyTask(java.io.InputStream src, java.io.OutputStream dst, boolean doCloseDst){ + return ()->{ + try{ + for( byte[] buf = new byte[8291] ;; ){ + int readLen = src.read(buf, 0, buf.length); + if( readLen == -1 ) break; + dst.write(buf, 0, readLen); + } + if( doCloseDst ) dst.close(); + }catch( java.io.IOException ex ){ + throw new RuntimeException(ex); + } + }; + } + public static java.util.Iterator map( java.util.Iterator src , java.util.function.Function mapper ) { return new java.util.Iterator() { @Override public boolean hasNext() { return src.hasNext(); } -- cgit v1.1 From 5ffd1b726b045f90fed0ef4cf959e1766fc5e589 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 30 Sep 2023 02:07:43 +0200 Subject: (ff, qemu) Add some notes --- doc/note/firefox/firefox.txt | 9 +++++++++ 
doc/note/qemu/qemu.txt | 34 +++++++++++++++++++++++++++------- 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/doc/note/firefox/firefox.txt b/doc/note/firefox/firefox.txt index e1e2999..9c48e29 100644 --- a/doc/note/firefox/firefox.txt +++ b/doc/note/firefox/firefox.txt @@ -2,6 +2,7 @@ Firefox ================ + ## Install plugin quickNdirty until restart - Visit "about:debugging" @@ -12,6 +13,7 @@ Firefox For refresh, there is a button on the same page to reload the plugin. + ## Create an XPI file (eg for distribution) "manifest.json" MUST be in top level dir inside ZIP. @@ -19,6 +21,7 @@ For refresh, there is a button on the same page to reload the plugin. zip my.xpi manifest.json main.js + ## Distribute via self-hosting Package MUST be signed by "addons.mozilla.org" (Except for ESR or dev @@ -27,3 +30,9 @@ firefox builds) XPI file can be drag-n-drop to FF to trigger install dialog. Or via gear icon "install from file". + + +## Install native manifest (linux) + +"~/.mozilla/native-messaging-hosts/.json" + diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 657faa9..064d833 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -54,26 +54,46 @@ Regular boot ## aarch64 (not working yet) - apt install -y --no-install-recommends qemu-uefi-aarch64 - curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/raw/master/kernel-qemu-4.4.34-jessie + #apt install -y --no-install-recommends qemu-uefi-aarch64 + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/kernel-qemu-5.10.63-bullseye + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/versatile-pb-bullseye-5.10.63.dtb + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/native-emulation/dtbs/bcm2711-rpi-4-b.dtb + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/kernel-qemu-5.4.51-buster + curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/versatile-pb-buster-5.4.51.dtb curl -sSLO https://downloads.raspberrypi.org/raspios_lite_arm64/images/raspios_lite_arm64-2023-05-03/2023-05-03-raspios-bullseye-arm64-lite.img.xz xz -d 2023-05-03-raspios-bullseye-arm64-lite.img.xz + echo p | /sbin/fdisk 2023-05-03-raspios-bullseye-arm64-lite.img | egrep 'Linux$' | sed -E 's:^\S+\s+([0-9]+) .*$:\nmount -o offset=$(expr \1 \\* 512) ./2023-05-03-raspios-bullseye-arm64-lite.img /mnt/foo:' qemu-img convert -f raw -O qcow2 2023-05-03-raspios-bullseye-arm64-lite.img raspbian-bullseye-lite.qcow2 qemu-img resize raspbian-bullseye-lite.qcow2 16G mv raspbian-bullseye-lite.qcow2 hda.qcow2 qemu-system-aarch64 \ - -bios /usr/share/qemu-efi-aarch64/QEMU_EFI.fd \ - -hda hda.qcow2 \ - -cpu cortex-a72 -m 256 \ + -m 256 -cpu arm1176 \ -M versatilepb \ -no-reboot \ -serial stdio \ -net nic -net user \ + -drive file=2023-05-03-raspios-bullseye-arm64-lite.img,format=raw \ + -boot 'dtb=versatile-pb-bullseye-5.10.63.dtb,kernel=kernel-qemu-5.10.63-bullseye,kernel_args=root=/dev/vda2 panic=1' \ + qemu-system-aarch64 \ + -dtb ./bcm2711-rpi-4-b.dtb \ + -m 256 -cpu arm1176 -M versatilepb \ + -kernel kernel-qemu-5.10.63-bullseye -append "root=/dev/sda2 rootfstype=ext4 rw" \ + -serial stdio \ + -drive file=2023-05-03-raspios-bullseye-arm64-lite.img,format=raw \ + -net nic -net user \ + -no-reboot \ + + qemu-system-arm \ + -M versatilepb \ -cpu arm1176 -m 256 \ - -net tap,ifname=vnet0,script=no,downscript=no \ - -bios /usr/share/ovmf/OVMF.fd \ + -drive "file=2023-05-03-raspios-bullseye-arm64-lite.img,if=none,index=0,media=disk,format=raw,id=disk0" \ + 
-device "virtio-blk-pci,drive=disk0,disable-modern=on,disable-legacy=off" \ + -net "user,hostfwd=tcp::5022-:2222" \ + -dtb "./versatile-pb-buster-5.4.51.dtb" \ + -kernel "./kernel-qemu-5.4.51-buster" -append 'root=/dev/vda2 panic=1' \ + -no-reboot ## Shared host directory via CIFS/SMB -- cgit v1.1 From 61b684aea4c6bedbe6d07cccb438927c7aac4761 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 30 Sep 2023 02:08:46 +0200 Subject: Add notes about how to build misc libs eg for qemu or other systems. --- doc/note/qemu/build-cJSON.txt | 76 +++++++++++++++++++++++++++++ doc/note/qemu/build-libarchive.txt | 72 +++++++++++++++++++++++++++ doc/note/qemu/build-libcurl.txt | 99 ++++++++++++++++++++++++++++++++++++++ doc/note/qemu/build-libpcre1.txt | 70 +++++++++++++++++++++++++++ doc/note/qemu/build-lua.txt | 86 +++++++++++++++++++++++++++++++++ doc/note/qemu/build-sqlite.txt | 77 +++++++++++++++++++++++++++++ doc/note/qemu/build-zlib.txt | 73 ++++++++++++++++++++++++++++ 7 files changed, 553 insertions(+) create mode 100644 doc/note/qemu/build-cJSON.txt create mode 100644 doc/note/qemu/build-libarchive.txt create mode 100644 doc/note/qemu/build-libcurl.txt create mode 100644 doc/note/qemu/build-libpcre1.txt create mode 100644 doc/note/qemu/build-lua.txt create mode 100644 doc/note/qemu/build-sqlite.txt create mode 100644 doc/note/qemu/build-zlib.txt diff --git a/doc/note/qemu/build-cJSON.txt b/doc/note/qemu/build-cJSON.txt new file mode 100644 index 0000000..93d9496 --- /dev/null +++ b/doc/note/qemu/build-cJSON.txt @@ -0,0 +1,76 @@ + +### Debian native +true \ + && PKGS_TO_ADD="ca-certificates gcc libc6-dev" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="curl mingw-w64-gcc tar" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +# Generic +true \ + && CJSON_VERSION="1.7.15" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && CJSON_URL="https://github.com/DaveGamble/cJSON/archive/refs/tags/v${CJSON_VERSION:?}.tar.gz" \ + && CJSON_SRCTGZ="${CACHE_DIR:?}/cJSON-${CJSON_VERSION:?}.tgz" \ + && CJSON_BINTGZ="${CJSON_SRCTGZ%.*}-bin.tgz" \ + && ${PKGINIT:?} && ${PKGADD:?} ${PKGS_TO_ADD} \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && if test ! -e "${CJSON_SRCTGZ:?}"; then (true \ + && echo "Download \"${CJSON_URL:?}\"" \ + && curl -sSLo "${CJSON_SRCTGZ:?}" "${CJSON_URL:?}" \ + );fi \ + && if test ! -e "${CJSON_BINTGZ:?}"; then (true \ + && printf '\n Build cJSON\n\n' \ + && tar xzf "${CJSON_SRCTGZ:?}" \ + && cd "cJSON-${CJSON_VERSION:?}" \ + && mkdir build build/obj build/lib build/include \ + && CFLAGS="-Wall -pedantic -fPIC" \ + && ${HOST_}cc $CFLAGS -c -o build/obj/cJSON.o cJSON.c \ + && ${HOST_}cc $CFLAGS -shared -o build/lib/libcJSON.so.${CJSON_VERSION:?} build/obj/cJSON.o \ + && unset CFLAGS \ + && (cd build/lib \ + && MIN=${CJSON_VERSION%.*} && MAJ=${MIN%.*} \ + && ln -s libcJSON.so.${CJSON_VERSION:?} libcJSON.so.${MIN:?} \ + && ln -s libcJSON.so.${MIN:?} libcJSON.so.${MAJ} \ + ) \ + && ${HOST_}ar rcs build/lib/libcJSON.a build/obj/cJSON.o \ + && cp -t build/. LICENSE README.md \ + && cp -t build/include/. 
cJSON.h \ + && rm build/obj -rf \ + && (cd build \ + && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \ + && tar --owner=0 --group=0 -czf "${CJSON_BINTGZ:?}" * \ + && md5sum -b "${CJSON_BINTGZ:?}" > "${CJSON_BINTGZ:?}.md5" \ + ) \ + && cd .. && rm -rf "cJSON-${CJSON_VERSION:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${CJSON_BINTGZ:?}" \ + && true + + + diff --git a/doc/note/qemu/build-libarchive.txt b/doc/note/qemu/build-libarchive.txt new file mode 100644 index 0000000..19c4815 --- /dev/null +++ b/doc/note/qemu/build-libarchive.txt @@ -0,0 +1,72 @@ + +### Debian native +true \ + && PKGS_TO_ADD="make gcc curl ca-certificates libc6-dev cmake" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +### TODO: test this +true \ + && PKGS_TO_ADD="make mingw-w64-gcc curl tar cmake" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +### General +true \ + && LIBARCHIVE_VERSION="3.6.2" \ + && CACHE_DIR="/var/tmp" \ + && true + + +### Make +true \ + && if test -n "$(ls -A)"; then true \ + && printf '\n It is recommended to run this script in an empty dir.\n\n' \ + && false \ + ;fi \ + && LIBARCHIVE_URL="https://github.com/libarchive/libarchive/archive/refs/tags/v${LIBARCHIVE_VERSION:?}.tar.gz" \ + && LIBARCHIVE_SRCTGZ="${CACHE_DIR:?}/libarchive-${LIBARCHIVE_VERSION:?}.tgz" \ + && LIBARCHIVE_BINTGZ="${LIBARCHIVE_SRCTGZ%.*}-bin.tgz" \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \ + && if test ! -e "${LIBARCHIVE_SRCTGZ:?}"; then true \ + && echo "Download ${LIBARCHIVE_URL:?}" \ + && curl -sSLo "${LIBARCHIVE_SRCTGZ:?}" "${LIBARCHIVE_URL:?}" \ + ;fi \ + && if test ! -e "${LIBARCHIVE_BINTGZ}"; then (true \ + && printf '\n Build libarchive\n\n' \ + && tar xf "${LIBARCHIVE_SRCTGZ:?}" \ + && cd "libarchive-${LIBARCHIVE_VERSION:?}" \ + && if test -n "$HOST"; then true \ + && CC="${HOST_:?}cc" cmake -D CMAKE_INSTALL_PREFIX="${PWD:?}/build/usr_local" . \ + ;else true \ + && cmake -D CMAKE_INSTALL_PREFIX="${PWD:?}/build/usr_local" . \ + ;fi \ + && make -j$(nproc) && make install \ + && rm -rf build/usr_local/lib/pkgconfig \ + && (cd build/usr_local \ + && find -type f -not -wholename MD5SUM -exec md5sum {} + > MD5SUM \ + && tar --owner=0 --group=0 -czf "${LIBARCHIVE_BINTGZ:?}" * \ + && md5sum -b "${LIBARCHIVE_BINTGZ:?}" > "${LIBARCHIVE_BINTGZ:?}.md5" \ + ) \ + && cd .. 
&& rm -rf "libarchive-${LIBARCHIVE_VERSION:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${LIBARCHIVE_BINTGZ:?}" \ + && true + diff --git a/doc/note/qemu/build-libcurl.txt b/doc/note/qemu/build-libcurl.txt new file mode 100644 index 0000000..eea83ec --- /dev/null +++ b/doc/note/qemu/build-libcurl.txt @@ -0,0 +1,99 @@ + + +### Debian native +true \ + && PKGS_TO_ADD="autoconf automake ca-certificates make" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="git make mingw-w64-gcc curl tar cmake autoconf automake libtool m4" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && CURL_VERSION="8.3.0" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && if test -n "$(ls -A)"; then true \ + && printf '\n It is recommended to run this script in an empty dir.\n\n' \ + && false \ + ;fi \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && CURL_VERSION_UGLY="$(echo "$CURL_VERSION"|sed 's;\.;_;g')" \ + && CURL_URL="https://github.com/curl/curl/archive/refs/tags/curl-${CURL_VERSION_UGLY:?}.tar.gz" \ + && CURL_SRCTGZ="${CACHE_DIR:?}/curl-${CURL_VERSION:?}.tgz" \ + && CURL_BINTGZ="${CURL_SRCTGZ%.*}-bin.tgz" \ + \ + && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \ + && printf '\n Download Sources\n\n' \ + && if test ! -e "${CURL_SRCTGZ:?}"; then true \ + && echo "Download ${CURL_URL:?}" \ + && curl -sSLo "${CURL_SRCTGZ:?}" "${CURL_URL:?}" \ + ;fi \ + && if test ! -e "${CURL_BINTGZ:?}"; then (true \ + && printf '\n Build curl\n\n' \ + && tar xf "${CURL_SRCTGZ:?}" \ + && cd "curl-curl-${CURL_VERSION_UGLY:?}" \ + && autoreconf -fi \ + && if test -n "$HOST"; then HORSCHT="--host=${HOST:?}";fi \ + && ./configure --prefix="$PWD/build/usr_local" --enable-http --with-nghttp2 --with-nghttp3 \ + --disable-alt-svc --disable-ares --disable-aws --disable-basic-auth \ + --disable-bearer-auth --disable-bindlocal --disable-cookies --disable-curldebug \ + --disable-dateparse --disable-debug --disable-dependency-tracking --disable-dict \ + --disable-digest-auth --disable-dnsshuffle --disable-doh --disable-ech --disable-file \ + --disable-form-api --disable-ftp --disable-get-easy-options --disable-gopher \ + --disable-headers-api --disable-hsts --disable-http-auth --disable-imap --enable-ipv6 \ + --disable-kerberos-auth --disable-largefile --disable-ldap --disable-ldaps \ + --disable-libcurl-option --disable-libtool-lock --enable-manual --disable-mime \ + --disable-mqtt --disable-negotiate-auth --disable-netrc --enable-ntlm --enable-ntlm-wb \ + --disable-openssl-auto-load-config --disable-optimize --disable-pop3 \ + --disable-progress-meter --enable-proxy --disable-pthreads --disable-rt --disable-rtsp \ + --disable-smb --enable-smtp --disable-socketpair --disable-sspi --disable-symbol-hiding \ + --disable-telnet --disable-tftp --disable-threaded-resolver --disable-tls-srp \ + --disable-unix-sockets --disable-verbose --disable-versioned-symbols --disable-warnings \ + --disable-websockets --disable-werror --without-schannel --without-secure-transport \ + --without-amissl --without-ssl --without-openssl --without-gnutls --without-mbedtls \ + --without-wolfssl --without-bearssl --without-rustls --without-test-nghttpx \ + 
--without-test-caddy --without-test-httpd --without-pic --without-aix-soname \ + --without-gnu-ld --without-sysroot --without-mingw1-deprecated --without-hyper \ + --without-zlib --without-brotli --without-zstd --without-ldap-lib --without-lber-lib \ + --without-gssapi-includes --without-gssapi-libs --without-gssapi \ + --without-default-ssl-backend --without-random --without-ca-bundle --without-ca-path \ + --without-ca-fallback --without-libpsl --without-libgsasl --without-librtmp \ + --without-winidn --without-libidn2 --without-ngtcp2 --without-quiche --without-msh3 \ + --without-zsh-functions-dir --without-fish-functions-dir \ + CFLAGS=-fPIC $HORSCHT \ + && make clean && make -j$(nproc) && make install \ + && (cd build/usr_local \ + && rm -rf share/aclocal bin/curl-config lib/libcurl.la lib/pkgconfig \ + && tar --owner=0 --group=0 -czf "${CURL_BINTGZ:?}" * \ + && md5sum -b "${CURL_BINTGZ:?}" > "${CURL_BINTGZ:?}.md5" \ + ) \ + && cd .. && rm -rf "curl-curl-${CURL_VERSION_UGLY:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +### Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${CURL_BINTGZ:?}" \ + && true + + diff --git a/doc/note/qemu/build-libpcre1.txt b/doc/note/qemu/build-libpcre1.txt new file mode 100644 index 0000000..affbd3e --- /dev/null +++ b/doc/note/qemu/build-libpcre1.txt @@ -0,0 +1,70 @@ + + +### Debian native +true \ + && PKGS_TO_ADD="git make gcc ca-certificates libc6-dev cmake autoconf automake libtool m4" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="git make mingw-w64-gcc curl tar cmake autoconf automake libtool m4" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && PCRE_VERSION="8.45" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && if test -n "$(ls -A)"; then true \ + && printf '\n It is recommended to run this script in an empty dir.\n\n' \ + && false \ + ;fi \ + && PCRE_URL="https://sourceforge.net/projects/pcre/files/pcre/${PCRE_VERSION:?}/pcre-${PCRE_VERSION:?}.tar.gz/download" \ + && PCRE_SRCTGZ="${CACHE_DIR:?}/pcre-${PCRE_VERSION:?}.tgz" \ + && PCRE_BINTGZ="${PCRE_SRCTGZ%.*}-bin.tgz" \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \ + && printf '\n Download Dependency Sources\n\n' \ + && if test ! -e "${PCRE_SRCTGZ:?}"; then true \ + && echo "Download ${PCRE_URL:?}" \ + && curl -sSLo "${PCRE_SRCTGZ:?}" "${PCRE_URL:?}" \ + ;fi \ + && if test ! -e "${PCRE_BINTGZ:?}"; then (true \ + && printf '\n Build curl\n\n' \ + && tar xf "${PCRE_SRCTGZ:?}" \ + && cd "pcre-${PCRE_VERSION:?}" \ + && ./configure --prefix="$PWD/build/usr_local" --host=$HOST --disable-cpp --enable-utf \ + && make clean && make -j$(nproc) && make install \ + && (cd build/usr_local \ + && rm -rf lib/libpcre.la lib/pkgconfig lib/libpcreposix.la bin/pcre-config \ + && tar --owner=0 --group=0 -czf "${PCRE_BINTGZ:?}" * \ + && md5sum -b "${PCRE_BINTGZ:?}" > "${PCRE_BINTGZ:?}.md5" \ + ) \ + && cd .. 
&& rm -rf "pcre-${PCRE_VERSION:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${PCRE_BINTGZ:?}" \ + && true + + + diff --git a/doc/note/qemu/build-lua.txt b/doc/note/qemu/build-lua.txt new file mode 100644 index 0000000..59c0838 --- /dev/null +++ b/doc/note/qemu/build-lua.txt @@ -0,0 +1,86 @@ + +### Debian native +true \ + && PKGS_TO_ADD="ca-certificates gcc make libc6-dev" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="binutils curl mingw-w64-gcc make tar" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && LUA_VERSION="5.4.3" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && if test -n "$(ls -A)"; then true \ + && printf '\n It is recommended to run this script in an empty dir.\n\n' \ + && false \ + ;fi \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \ + && LUA_URL="https://www.lua.org/ftp/lua-${LUA_VERSION:?}.tar.gz" \ + && LUA_SRCTGZ="${CACHE_DIR:?}/lua-${LUA_VERSION:?}.tgz" \ + && LUA_BINTGZ="${LUA_SRCTGZ%.*}-bin.tgz" \ + && printf '\n Download Dependency Sources\n\n' \ + && if test ! -e "${LUA_SRCTGZ:?}"; then true \ + && echo "Download ${LUA_URL:?}" \ + && curl -sSLo "${LUA_SRCTGZ:?}" "${LUA_URL:?}" \ + ;fi \ + && if test ! -e "${LUA_BINTGZ:?}"; then (true \ + && printf '\n Build lua\n\n' \ + && tar xf "${LUA_SRCTGZ:?}" \ + && cd "lua-${LUA_VERSION:?}" \ + && mkdir -p build/bin build/include build/lib build/man/man1 \ + && export CFLAGS="-ggdb -Wall -Wextra" \ + && `# Uncomment this line for debugging` \ + && export CFLAGS="$CFLAGS -DLUAI_ASSERT -DLUA_USE_APICHECK" \ + && `# endOf Uncomment` \ + && make clean \ + && if echo "$HOST"|grep -q '\-mingw'; then true \ + && make -j$(nproc) PLAT=mingw \ + CC="${HOST_}gcc -std=gnu99" AR="${HOST_}ar rcu" RANLIB="${HOST_}ranlib" \ + && cp -t build/. README \ + && cp -t build/bin/. src/lua.exe src/luac.exe \ + ;else true \ + && export CFLAGS="$CFLAGS -DLUA_USE_POSIX" \ + && make -j$(nproc) \ + && cp -t build/. README \ + && cp -t build/bin/. src/lua src/luac \ + ;fi \ + && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \ + && cp -t build/lib/. src/liblua.a \ + && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \ + && (cd build \ + && rm -rf include/lua.hpp \ + && tar --owner=0 --group=0 -czf "${LUA_BINTGZ:?}" * \ + && md5sum -b "${LUA_BINTGZ:?}" > "${LUA_BINTGZ:?}.md5" \ + ) \ + && cd .. 
&& rm -rf "lua-${LUA_VERSION:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${LUA_BINTGZ:?}" \ + && true + + diff --git a/doc/note/qemu/build-sqlite.txt b/doc/note/qemu/build-sqlite.txt new file mode 100644 index 0000000..b7b31bd --- /dev/null +++ b/doc/note/qemu/build-sqlite.txt @@ -0,0 +1,77 @@ + +### Debian native +true \ + && PKGS_TO_ADD="ca-certificates gcc libc6-dev make tcl" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="curl gcc musl-dev make mingw-w64-gcc tar tcl" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && SQLITE_VERSION="3.33.0" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && SQLITE_URL="https://github.com/sqlite/sqlite/archive/refs/tags/version-${SQLITE_VERSION:?}.tar.gz" \ + && SQLITE_SRCTGZ="${CACHE_DIR:?}/sqlite-${SQLITE_VERSION:?}.tgz" \ + && SQLITE_BINTGZ="${SQLITE_SRCTGZ%.*}-bin.tgz" \ + && ${PKGINIT:?} && ${PKGADD:?} ${PKGS_TO_ADD} \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && if test ! -e "${SQLITE_SRCTGZ:?}"; then (true \ + && echo "Download \"${SQLITE_URL:?}\"" \ + && curl -sSLo "${SQLITE_SRCTGZ:?}" "${SQLITE_URL:?}" \ + );fi \ + && if test ! -e "${SQLITE_BINTGZ:?}"; then (true \ + && printf '\n Build SqLite\n\n' \ + && tar xzf "${SQLITE_SRCTGZ:?}" \ + && cd sqlite-*${SQLITE_VERSION:?} \ + && mkdir build \ + && if echo $HOST|grep -q 'mingw'; then true \ + && ./configure --prefix=${PWD:?}/build --host=${HOST:?} \ + CC=${HOST_}cc CPP=$CPP CXX=$CXX BCC=gcc BEXE=.exe config_TARGET_EXEEXT=.exe \ + && ln -s mksourceid.exe mksourceid \ + && make clean && make -j$(nproc) && make install \ + && (cd build \ + && rm -rf lemon* mksourceid lib/pkgconfig lib/*.la \ + ) \ + ;else true \ + && ./configure --prefix=${PWD:?}/build \ + && make clean && make -j$(nproc) && make install \ + ;fi \ + && cp README.md LICENSE.md VERSION build/. \ + && (cd build \ + && rm -rf lib/libsqlite3.la lib/pkgconfig \ + && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM \ + && tar --owner=0 --group=0 -czf "${SQLITE_BINTGZ:?}" * \ + && md5sum -b "${SQLITE_BINTGZ:?}" > "${SQLITE_BINTGZ:?}.md5" \ + ) \ + && cd .. 
&& rm -rf sqlite-*${SQLITE_VERSION:?} \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${SQLITE_BINTGZ:?}" \ + && true + + + diff --git a/doc/note/qemu/build-zlib.txt b/doc/note/qemu/build-zlib.txt new file mode 100644 index 0000000..1b6fc26 --- /dev/null +++ b/doc/note/qemu/build-zlib.txt @@ -0,0 +1,73 @@ + +### Debian native +true \ + && PKGS_TO_ADD="ca-certificates gcc libc6-dev make" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="curl make mingw-w64-gcc tar" \ + && SUDO="/home/$USER/.local/bin/sudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN="$SUDO apk clean" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && ZLIB_VERSION="1.2.11" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Make +true \ + && ZLIB_URL="https://downloads.sourceforge.net/project/libpng/zlib/${ZLIB_VERSION:?}/zlib-${ZLIB_VERSION:?}.tar.gz" \ + && ZLIB_SRCTGZ="${CACHE_DIR:?}/zlib-${ZLIB_VERSION:?}.tgz" \ + && ZLIB_BINTGZ="${ZLIB_SRCTGZ%.*}-bin.tgz" \ + && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ + && if test ! -e "${ZLIB_SRCTGZ:?}"; then (true \ + && echo "Download \"${ZLIB_URL:?}\"" \ + && curl -sSLo "${ZLIB_SRCTGZ:?}" "${ZLIB_URL:?}" \ + );fi \ + && if test ! -e "${ZLIB_BINTGZ:?}"; then (true \ + && printf '\n Build zlib\n\n' \ + && tar xzf "${ZLIB_SRCTGZ:?}" \ + && cd "zlib-${ZLIB_VERSION:?}" \ + && mkdir build \ + && if echo $HOST|grep -q '\-mingw'; then true \ + && export DESTDIR=./build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \ + && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \ + && make -j$(nproc) -fwin32/Makefile.gcc PREFIX=${HOST_:?} \ + && make -fwin32/Makefile.gcc install PREFIX=${HOST_:?} \ + && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \ + ;else true \ + && ./configure --prefix=$PWD/build \ + && make -j$(nproc) && make install \ + ;fi \ + && cp README build/. \ + && (cd build \ + && rm -rf lib/pkgconfig \ + && tar --owner=0 --group=0 -czf "${ZLIB_BINTGZ:?}" * \ + && md5sum -b "${ZLIB_BINTGZ:?}" > "${ZLIB_BINTGZ:?}.md5" \ + ) \ + && cd .. && rm -rf "zlib-${ZLIB_VERSION:?}" \ + );fi \ + && printf '\n DONE\n\n' + + +## Install +true \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${ZLIB_BINTGZ:?}" \ + && true + + + -- cgit v1.1 From abaaf383172cdc3c861f1c867d3522a1e7c04476 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 30 Sep 2023 16:37:31 +0200 Subject: YAYY. libarchive windoof build seems to work now. 
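
Note: the mingw configure call below assumes a prebuilt libiconv lying next
to the unpacked sources, under "../libiconv-1.16-mingw64" (that is what
"--with-libiconv-prefix=${PWD%/*}/libiconv-1.16-mingw64" points at). Adjust
that path, or drop the flag if iconv comes from the toolchain itself.
Untested sketch to check which DLLs the produced binaries will pull in at
runtime (assumes mingw binutils, and that bsdtar.exe lands in
build/usr_local/bin):

    x86_64-w64-mingw32-objdump -p build/usr_local/bin/bsdtar.exe | grep 'DLL Name'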
--- doc/note/qemu/build-libarchive.txt | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/doc/note/qemu/build-libarchive.txt b/doc/note/qemu/build-libarchive.txt index 19c4815..96f95d6 100644 --- a/doc/note/qemu/build-libarchive.txt +++ b/doc/note/qemu/build-libarchive.txt @@ -1,7 +1,8 @@ ### Debian native +### TODO: test this true \ - && PKGS_TO_ADD="make gcc curl ca-certificates libc6-dev cmake" \ + && PKGS_TO_ADD="make gcc curl ca-certificates libc6-dev" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ @@ -11,13 +12,12 @@ true \ ### Alpine mingw cross -### TODO: test this true \ - && PKGS_TO_ADD="make mingw-w64-gcc curl tar cmake" \ + && PKGS_TO_ADD="make mingw-w64-gcc curl tar" \ && SUDO="/home/$USER/.local/bin/sudo" \ && PKGINIT=true \ && PKGADD="$SUDO apk add" \ - && PKGCLEAN="$SUDO apk clean" \ + && PKGCLEAN="$SUDO apk cache clean 2>&1| grep -v 'ERROR: Package cache is not enabled'" \ && HOST=x86_64-w64-mingw32 \ && true @@ -35,7 +35,7 @@ true \ && printf '\n It is recommended to run this script in an empty dir.\n\n' \ && false \ ;fi \ - && LIBARCHIVE_URL="https://github.com/libarchive/libarchive/archive/refs/tags/v${LIBARCHIVE_VERSION:?}.tar.gz" \ + && LIBARCHIVE_URL="https://github.com/libarchive/libarchive/releases/download/v${LIBARCHIVE_VERSION:?}/libarchive-${LIBARCHIVE_VERSION:?}.tar.gz" \ && LIBARCHIVE_SRCTGZ="${CACHE_DIR:?}/libarchive-${LIBARCHIVE_VERSION:?}.tgz" \ && LIBARCHIVE_BINTGZ="${LIBARCHIVE_SRCTGZ%.*}-bin.tgz" \ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \ @@ -48,14 +48,14 @@ true \ && printf '\n Build libarchive\n\n' \ && tar xf "${LIBARCHIVE_SRCTGZ:?}" \ && cd "libarchive-${LIBARCHIVE_VERSION:?}" \ - && if test -n "$HOST"; then true \ - && CC="${HOST_:?}cc" cmake -D CMAKE_INSTALL_PREFIX="${PWD:?}/build/usr_local" . \ - ;else true \ - && cmake -D CMAKE_INSTALL_PREFIX="${PWD:?}/build/usr_local" . \ - ;fi \ - && make -j$(nproc) && make install \ - && rm -rf build/usr_local/lib/pkgconfig \ + && ./configure --prefix="${PWD:?}/build/usr_local" --host=${HOST} \ + --enable-bsdtar=static --enable-bsdcat=static --enable-bsdcpio=static \ + --disable-rpath --enable-posix-regex-lib \ + --with-libiconv-prefix="${PWD%/*}/libiconv-1.16-mingw64" \ + CC=${HOST_}gcc CPP=${HOST_}cpp \ + && make clean && make -j$(nproc) && make install \ && (cd build/usr_local \ + && rm -rf lib/pkgconfig lib/libarchive.la \ && find -type f -not -wholename MD5SUM -exec md5sum {} + > MD5SUM \ && tar --owner=0 --group=0 -czf "${LIBARCHIVE_BINTGZ:?}" * \ && md5sum -b "${LIBARCHIVE_BINTGZ:?}" > "${LIBARCHIVE_BINTGZ:?}.md5" \ -- cgit v1.1 From 7a27a057a6ae162b74d379e868fd0b2fc8247b85 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 4 Oct 2023 21:45:27 +0200 Subject: Cleanup some stuff in notes. --- doc/note/binutils/dumpbin.txt | 4 +++- doc/note/gpg/gpg-windoof.txt | 5 ----- doc/note/gpg/gpg.txt | 3 +++ doc/note/qemu/qemu.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) delete mode 100644 doc/note/gpg/gpg-windoof.txt diff --git a/doc/note/binutils/dumpbin.txt b/doc/note/binutils/dumpbin.txt index e71be0e..638cf8f 100644 --- a/doc/note/binutils/dumpbin.txt +++ b/doc/note/binutils/dumpbin.txt @@ -2,7 +2,9 @@ DumpBin For Windoof =================== -Analyze PE32 / PE32+ files. +## Analyze PE32 / PE32+ files. + +TODO: This is unusable, because this only works with lots of bloat installed. 
Location: "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\dumpbin.exe" diff --git a/doc/note/gpg/gpg-windoof.txt b/doc/note/gpg/gpg-windoof.txt deleted file mode 100644 index e883cbf..0000000 --- a/doc/note/gpg/gpg-windoof.txt +++ /dev/null @@ -1,5 +0,0 @@ - - -[Why does git complain that no GPG agent is running?](https://superuser.com/a/1663941/1123359) - - diff --git a/doc/note/gpg/gpg.txt b/doc/note/gpg/gpg.txt index 11721f0..0089221 100644 --- a/doc/note/gpg/gpg.txt +++ b/doc/note/gpg/gpg.txt @@ -85,4 +85,7 @@ you're doing! If you don't, you MUST NOT use those instructions! gpgconf --kill gpg-agent gpgconf --launch gpg-agent +[windoof: Why does git complain that no GPG agent is running?](https://superuser.com/a/1663941/1123359) says: + gpg-connect-agent reloadagent /bye + diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 657faa9..2c7eaba 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -38,7 +38,7 @@ Windoof: && BIOSFILE="${BIOSDIR:?}/bios-256k.bin" \ && FIXMOUSEALIGN="-device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet" \ && NETWORK="-net nic -net user" \ - && NETWORK="-device e1000,netdev=net0 -netdev user,id=net0,hostfwd=tcp:127.0.0.1:10022-:22" \ + && NETWORK="-device e1000,netdev=n0 -netdev user,id=n0,hostfwd=tcp:127.0.0.1:2222-:22" \ && HOSTSPECIFICOPTS="--enable-kvm" \ && HOSTSPECIFICOPTS="-L ${BIOSDIR:?} -bios ${BIOSFILE:?}" \ @@ -169,7 +169,7 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever. && (echo "Acquire::http::proxy \"${http_proxy}\";" echo "Acquire::https::proxy \"${https_proxy}\";" ) | $SUDO tee /etc/apt/apt.conf.d/80proxy >/dev/null \ - fi \ + ;fi \ && $SUDO apt update \ && $SUDO apt install -y --no-install-recommends vim openssh-server net-tools curl \ && $SUDO sed -i -E 's;^GRUB_TIMEOUT=5$;GRUB_TIMEOUT=1;' /etc/default/grub \ -- cgit v1.1 From 0422d0d3c1c72d7136ea711d29142c110da9e553 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 6 Oct 2023 00:22:16 +0200 Subject: (qemu) Whops. Curl was missing in install templates. Added note now to build qemu overlay images. 
--- doc/note/qemu/build-cJSON.txt | 2 +- doc/note/qemu/build-libcurl.txt | 2 +- doc/note/qemu/build-libpcre1.txt | 2 +- doc/note/qemu/build-lua.txt | 2 +- doc/note/qemu/build-sqlite.txt | 2 +- doc/note/qemu/build-zlib.txt | 2 +- doc/note/qemu/qemu.txt | 5 ++++- 7 files changed, 10 insertions(+), 7 deletions(-) diff --git a/doc/note/qemu/build-cJSON.txt b/doc/note/qemu/build-cJSON.txt index 93d9496..0e8d0df 100644 --- a/doc/note/qemu/build-cJSON.txt +++ b/doc/note/qemu/build-cJSON.txt @@ -1,7 +1,7 @@ ### Debian native true \ - && PKGS_TO_ADD="ca-certificates gcc libc6-dev" \ + && PKGS_TO_ADD="ca-certificates curl gcc libc6-dev" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/build-libcurl.txt b/doc/note/qemu/build-libcurl.txt index eea83ec..be7b8c2 100644 --- a/doc/note/qemu/build-libcurl.txt +++ b/doc/note/qemu/build-libcurl.txt @@ -2,7 +2,7 @@ ### Debian native true \ - && PKGS_TO_ADD="autoconf automake ca-certificates make" \ + && PKGS_TO_ADD="autoconf automake ca-certificates curl make" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/build-libpcre1.txt b/doc/note/qemu/build-libpcre1.txt index affbd3e..491809b 100644 --- a/doc/note/qemu/build-libpcre1.txt +++ b/doc/note/qemu/build-libpcre1.txt @@ -2,7 +2,7 @@ ### Debian native true \ - && PKGS_TO_ADD="git make gcc ca-certificates libc6-dev cmake autoconf automake libtool m4" \ + && PKGS_TO_ADD="curl git make gcc ca-certificates libc6-dev cmake autoconf automake libtool m4" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/build-lua.txt b/doc/note/qemu/build-lua.txt index 59c0838..5440233 100644 --- a/doc/note/qemu/build-lua.txt +++ b/doc/note/qemu/build-lua.txt @@ -1,7 +1,7 @@ ### Debian native true \ - && PKGS_TO_ADD="ca-certificates gcc make libc6-dev" \ + && PKGS_TO_ADD="curl ca-certificates gcc make libc6-dev" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/build-sqlite.txt b/doc/note/qemu/build-sqlite.txt index b7b31bd..81210f9 100644 --- a/doc/note/qemu/build-sqlite.txt +++ b/doc/note/qemu/build-sqlite.txt @@ -1,7 +1,7 @@ ### Debian native true \ - && PKGS_TO_ADD="ca-certificates gcc libc6-dev make tcl" \ + && PKGS_TO_ADD="curl ca-certificates gcc libc6-dev make tcl" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/build-zlib.txt b/doc/note/qemu/build-zlib.txt index 1b6fc26..a90e616 100644 --- a/doc/note/qemu/build-zlib.txt +++ b/doc/note/qemu/build-zlib.txt @@ -1,7 +1,7 @@ ### Debian native true \ - && PKGS_TO_ADD="ca-certificates gcc libc6-dev make" \ + && PKGS_TO_ADD="curl ca-certificates gcc libc6-dev make" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 064d833..b450dad 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -10,7 +10,10 @@ Qemu ## Manage Images ### Create new image - qemu-img create -f qcow2 my_disk.qcow2 16G + qemu-img create -f qcow2 disk.qcow2 16G + +### Create new overlay image + qemu-img create -o backing_file=base.qcow2,backing_fmt=qcow2 -f qcow2 disk.cow ### Convert qcow2 to raw qemu-img convert -f qcow2 -O raw foo.qcow2 foo.img -- cgit v1.1 
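
### Boot from an overlay instead of the base (untested sketch)

Assumes a base.qcow2 as in the note above; all writes land in overlay.qcow2
and the base stays clean, so a fresh overlay means a fresh machine. The file
names here are only examples:

    qemu-img create -o backing_file=base.qcow2,backing_fmt=qcow2 -f qcow2 overlay.qcow2
    qemu-system-x86_64 -enable-kvm -m 2G -hda overlay.qcow2
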
From 587de64a627966721811ab1e2ec4c81750311e21 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 7 Oct 2023 15:19:51 +0200 Subject: Fix some typos --- doc/note/qemu/qemu.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 56de466..b58b193 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -13,7 +13,7 @@ Qemu qemu-img create -f qcow2 disk.qcow2 16G ### Create new overlay image - qemu-img create -o backing_file=base.qcow2,backing_fmt=qcow2 -f qcow2 disk.cow + qemu-img create -o backing_file=base.qcow2,backing_fmt=qcow2 -f qcow2 disk.qcow2 ### Convert qcow2 to raw qemu-img convert -f qcow2 -O raw foo.qcow2 foo.img @@ -112,7 +112,7 @@ Regular boot TODO: SMB server windoof DoesNotWork: "https://serverfault.com/questions/442664/virtualization-linux-kvm-qemu-host-windows-vm-guest-how-to-access-data-drive#comment479177_442678" - true `# SMB client debian` \ +true `# SMB client debian` \ && hostUsername=yourHostUser \ && smbServer=10.0.2.2 \ && sharename=work \ -- cgit v1.1 From ae26cf821e659df9665c9c223e20b927d201748d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 7 Oct 2023 21:42:46 +0200 Subject: (qemu) Add doc how to auto mount cifs (smb) share while boot --- doc/note/qemu/qemu.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index b58b193..22f5940 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -125,6 +125,11 @@ true `# SMB client debian` \ && $SUDO mount -t cifs -o username=${hostUsername:?},uid=${guestUid:?},gid=${guestGid:?} "//${smbServer:?}/${sharename:?}" "${mountpoint:?}" \ && true +### Add those in "/etc/fstab" to setup mount automatically at boot: +### HINT: mkdir /home/user/build + //10.0.2.2/sharename /mnt/sharename cifs password=,uid=1000,gid=1000,user 0 0 + /home/user/build /mnt/sharename/build none bind 0 0 + List smb shares (eg debugging) smbclient -NL //10.0.2.2 -- cgit v1.1 From 28a0af6b23193d64de458fa3b9a17453bcae9598 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 11 Oct 2023 19:41:08 +0200 Subject: (links) Minor cleanup --- doc/note/links/links.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 5564be3..7e08812 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -386,10 +386,10 @@ Links (Aka argument amplifiers) ## java assert - [how to enable](https://stackoverflow.com/a/68893479/4415884) -- [What are they for](https://stackoverflow.com/a/298933/4415884) +- [When to use them](https://softwareengineering.stackexchange.com/a/15518/306800) - [What are they for](https://en.wikipedia.org/wiki/Assertion_(software_development)#Assertions_for_run-time_checking) +- [What are they for](https://stackoverflow.com/a/298933/4415884) - [How and when to use them](https://docs.oracle.com/javase/8/docs/technotes/guides/language/assert.html) -- [When to use them](https://softwareengineering.stackexchange.com/questions/15515/when-to-use-assertions-and-when-to-use-exceptions) - [I dont care](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/505/overview?commentId=219173) ## Mensch verblödet, modern tech, IQ, dumm, test -- cgit v1.1 From a7f19d15457b38de719bb3e411ee41e385608724 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 12 Oct 2023 03:04:59 +0200 Subject: Impossible to connect two windoof machines in 
the same qemu net. --- doc/note/qemu/qemu-networking.txt | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 doc/note/qemu/qemu-networking.txt diff --git a/doc/note/qemu/qemu-networking.txt b/doc/note/qemu/qemu-networking.txt new file mode 100644 index 0000000..4318cb5 --- /dev/null +++ b/doc/note/qemu/qemu-networking.txt @@ -0,0 +1,27 @@ + +## Connect multiple VMs into one network + +TODO: All the (fu**) in here does NOT work, no matter how many RTFM +tutorials I try. + + +# launch one QEMU instance +qemu-system-whatever \ + -device e1000,netdev=n1,mac=52:54:00:12:34:56 \ + -netdev socket,id=n1,mcast=230.0.0.1:1234 \ + ; +# launch another QEMU instance on same "bus" +qemu-system-whatever \ + -device e1000,netdev=n2,mac=52:54:00:12:34:57 \ + -netdev socket,id=n2,mcast=230.0.0.1:1234 \ + ; +# launch yet another QEMU instance on same "bus" +qemu-system-whatever \ + -device e1000,netdev=n3,mac=52:54:00:12:34:58 \ + -netdev socket,id=n3,mcast=230.0.0.1:1234 \ + ; + +## Sources + +- [connect VM networks](https://qemu.weilnetz.de/doc/6.0/system/invocation.html#sec-005finvocation) + -- cgit v1.1 From 592e97ce53e944a04e92a0b49d5f8725684805cc Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 15 Oct 2023 17:26:38 +0200 Subject: (qemu) Doc how to setup socket mcast networks --- doc/note/qemu/qemu-networking.txt | 27 --------------------------- doc/note/qemu/qemu.txt | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 27 deletions(-) delete mode 100644 doc/note/qemu/qemu-networking.txt diff --git a/doc/note/qemu/qemu-networking.txt b/doc/note/qemu/qemu-networking.txt deleted file mode 100644 index 4318cb5..0000000 --- a/doc/note/qemu/qemu-networking.txt +++ /dev/null @@ -1,27 +0,0 @@ - -## Connect multiple VMs into one network - -TODO: All the (fu**) in here does NOT work, no matter how many RTFM -tutorials I try. 
- - -# launch one QEMU instance -qemu-system-whatever \ - -device e1000,netdev=n1,mac=52:54:00:12:34:56 \ - -netdev socket,id=n1,mcast=230.0.0.1:1234 \ - ; -# launch another QEMU instance on same "bus" -qemu-system-whatever \ - -device e1000,netdev=n2,mac=52:54:00:12:34:57 \ - -netdev socket,id=n2,mcast=230.0.0.1:1234 \ - ; -# launch yet another QEMU instance on same "bus" -qemu-system-whatever \ - -device e1000,netdev=n3,mac=52:54:00:12:34:58 \ - -netdev socket,id=n3,mcast=230.0.0.1:1234 \ - ; - -## Sources - -- [connect VM networks](https://qemu.weilnetz.de/doc/6.0/system/invocation.html#sec-005finvocation) - diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index b58b193..0abcd1d 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -32,6 +32,39 @@ Windoof: qemu-img convert -O qcow2 input.qcow output.qcow2 +## Example Params (Usage: CopyPaste, then delege what is not needed) +qemu-system-x86_64 \ + -enable-kvm -m size=4G -smp cores=$(nproc) \ + -monitor stdio \ + `# Drives & Boot.` \ + -boot order=dc \ + -cdrom "path/to/cd.iso" \ + -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ + `# 10.0.2.x network with host redirect` \ + -netdev user,id=n0,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ + -device e1000,netdev=n0 \ + `# socket mcast shared network adapter` \ + -netdev socket,id=n1,mcast=230.0.0.1:1234 \ + -device e1000,netdev=n1 \ + `# Fix broken host systems` \ + -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ + -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ + `# Choose ONE of those for graphic output` \ + -nographic \ + -display gtk \ + -display sdl \ + ; + +### Example manual adapter setup (inside VM) for socket mcast network: +true \ + && ADDR=192.168.42.101/24 \ + && DEV=ens4 \ + && SUDO=sudo \ + && $SUDO ip a add dev "${DEV:?}" "${ADDR:?}" \ + && $SUDO ip link set "${DEV:?}" up \ + && true + + ## amd64 # Choose whichever fits the need @@ -217,4 +250,5 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever. 
- [qemu monitor via telnet](https://unix.stackexchange.com/a/426951/292722) - [qemu monitor via stdio](https://unix.stackexchange.com/a/57835/292722) - [qemu raspberry pi TODO](https://blog.agchapman.com/using-qemu-to-emulate-a-raspberry-pi/) +- [connect VM networks](https://qemu.weilnetz.de/doc/6.0/system/invocation.html#sec-005finvocation) -- cgit v1.1 From 52b257d55141ce498f890b49b5092caccc256e6d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 17 Oct 2023 01:34:54 +0200 Subject: (qemu) Migrate gateleen from docker to qemu --- doc/note/qemu/build-gateleen.txt | 69 +++++++++++++++++++++++++++++++++++++ src/main/docker/gateleen.Dockerfile | 65 ---------------------------------- 2 files changed, 69 insertions(+), 65 deletions(-) create mode 100644 doc/note/qemu/build-gateleen.txt delete mode 100644 src/main/docker/gateleen.Dockerfile diff --git a/doc/note/qemu/build-gateleen.txt b/doc/note/qemu/build-gateleen.txt new file mode 100644 index 0000000..77dcab8 --- /dev/null +++ b/doc/note/qemu/build-gateleen.txt @@ -0,0 +1,69 @@ + + +### Alpine +true \ + && PKGS_TO_ADD="curl maven nodejs npm redis openjdk11-jre-headless" \ + && SUDO="${HOME:?}/.local/bin/mysudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && PKGCLEAN=true \ + && true + + +### Generic +true \ + && GATELEEN_GIT_TAG="v1.3.28" \ + && WORKDIR="/${HOME:?}/work" \ + && CACHE_DIR="/var/tmp" \ + && true + + +## Setup Dependencies & get sources +true \ + && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \ + \ + && curl -sSL https://github.com/swisspush/gateleen/archive/refs/tags/"${GATELEEN_GIT_TAG:?}".tar.gz > "${CACHE_DIR:?}/gateleen-${GATELEEN_GIT_TAG:?}.tgz" \ + && true + + +### Make +true \ + && mkdir -p "${WORKDIR:?}/gateleen" && cd "${WORKDIR:?}/gateleen" \ + && tar --strip-components 1 -xf "${CACHE_DIR:?}/gateleen-${GATELEEN_GIT_TAG:?}.tgz" \ + && (cd gateleen-hook-js && npm install) \ + && mkdir -p gateleen-hook-js/node/node_modules/npm/bin \ + && ln -s /usr/bin/node gateleen-hook-js/node/node \ + && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js >/dev/null \ + && mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js \ + && mvn install -PpublicRepos -DfailIfNoTests=false \ + -pl !gateleen-test,!gateleen-hook-js \ + -Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests \ + && mkdir "${WORKDIR:?}/classpath" \ + && (cd gateleen-playground && mvn dependency:copy-dependencies \ + -DexcludeScope=provided -DoutputDirectory="${WORKDIR:?}/classpath/.") \ + && cp gateleen-playground/target/gateleen-playground-*.jar "${WORKDIR:?}/classpath/." 
\ + && mkdir "${WORKDIR:?}/etc" "${WORKDIR:?}/redis-state" \ + && printf >"${WORKDIR:?}/etc/redis.conf" '%s\n' \ + 'save ""' \ + 'appendonly yes' \ + 'appendfilename appendonly.aof' \ + && `# Squeeze those funny "static files" into redis` \ + && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf" \ + & java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \ + & sleep 3 \ + ) \ + && (cd "${WORKDIR:?}/gateleen" && mvn deploy -PuploadStaticFiles) \ + && pkill -TERM java && pkill -INT redis-server \ + && $PKGDEL $PKGS_TO_DEL \ + && $PKGCLEAN \ + && printf '\n DONE\n\n' \ + && true + + +### Run +true \ + && ip a | grep inet \ + && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf") \ + & java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \ + && true + diff --git a/src/main/docker/gateleen.Dockerfile b/src/main/docker/gateleen.Dockerfile deleted file mode 100644 index f604dc2..0000000 --- a/src/main/docker/gateleen.Dockerfile +++ /dev/null @@ -1,65 +0,0 @@ -# -# A Gateleen playground instance. -# - -ARG PARENT_IMAGE=alpine:3.16.0 -FROM $PARENT_IMAGE - -ARG GATELEEN_GIT_TAG=v1.3.28 -ARG UID=1000 -ARG GID=1000 -ARG PKGS_TO_ADD="maven nodejs npm curl redis openjdk11-jre-headless" -#ARG PKGS_TO_DEL="maven nodejs npm" -ARG PKGS_TO_DEL="nodejs npm" -ARG PKGINIT="true" -ARG PKGADD="apk add" -ARG PKGDEL="true" -ARG PKGCLEAN="true" - -WORKDIR /work - -RUN true \ - && printf 'user:x:%s:%s:user:/work:/bin/sh\n' "${UID:?}" "${GID:?}" >> /etc/passwd \ - && true - -RUN true \ - && $PKGINIT && $PKGADD $PKGS_TO_ADD \ - && sed -i "s,, /data/maven/.m2/repository\n,g" /usr/share/java/maven-3/conf/settings.xml \ - && mkdir /data /data/maven /work/gateleen \ - && chown "${UID:?}:${GID:?}" /data/maven /work /work/gateleen \ - && curl -sSL https://github.com/swisspush/gateleen/archive/refs/tags/"$GATELEEN_GIT_TAG".tar.gz > "/tmp/gateleen-$GATELEEN_GIT_TAG.tgz" \ - && cd /work/gateleen \ - && su user -c 'tar --strip-components 1 -xf /tmp/gateleen-"$GATELEEN_GIT_TAG".tgz' \ - && (cd gateleen-hook-js && su user -c 'npm install') \ - && su user -c 'mkdir -p gateleen-hook-js/node/node_modules/npm/bin' \ - && su user -c 'ln -s /usr/bin/node gateleen-hook-js/node/node' \ - && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | su user -c 'tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js' >/dev/null \ - && su user -c 'mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js' \ - && su user -c 'mvn install -PpublicRepos -DfailIfNoTests=false \ - -pl !gateleen-test,!gateleen-hook-js \ - -Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests' \ - && mkdir /work/classpath \ - && chown "${UID:?}:${GID:?}" /work/classpath \ - && su user -c 'cd gateleen-playground && mvn dependency:copy-dependencies \ - -DexcludeScope=provided -DoutputDirectory=/work/classpath/.' \ - && cp gateleen-playground/target/gateleen-playground-*.jar /work/classpath/. 
\ - && mkdir /work/etc \ - && printf >/work/etc/redis.conf '%s\n' \ - 'save ""' \ - 'appendonly yes' \ - 'appenddirname "redis-state"' \ - 'appendfilename appendonly.aof' \ - && (su user -c 'cd /work && redis-server /work/etc/redis.conf & \ - java -cp '"'/work/classpath/*'"' org.swisspush.gateleen.playground.Server' \ - & sleep 3) \ - && su user -c 'cd /work/gateleen && mvn deploy -PuploadStaticFiles' \ - && pkill -INT java && pkill -INT redis-server \ - && $PKGDEL $PKGS_TO_DEL \ - && $PKGCLEAN \ - && true - -USER "${UID}:${GID}" - -#CMD ["sleep", "36000"] -CMD ["sh", "-c", "ip a|grep inet && redis-server /work/etc/redis.conf & java -cp '/work/classpath/*' org.swisspush.gateleen.playground.Server"] - -- cgit v1.1 From fcb84f32536c5577307c440d07484d77f95df3cc Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 17 Oct 2023 02:17:53 +0200 Subject: (qemu) Cleanup, Test new Gateleen build env --- doc/note/qemu/build-gateleen.txt | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/doc/note/qemu/build-gateleen.txt b/doc/note/qemu/build-gateleen.txt index 77dcab8..5e71c05 100644 --- a/doc/note/qemu/build-gateleen.txt +++ b/doc/note/qemu/build-gateleen.txt @@ -7,6 +7,9 @@ true \ && PKGINIT=true \ && PKGADD="$SUDO apk add" \ && PKGCLEAN=true \ + && mkdir -p "${HOME:?}/.local/bin" \ + && printf '%s\n' '#!/bin/sh' 'printf "Sudo "' 'su root -c "$(echo "$@")"' > "${HOME:?}/.local/bin/mysudo" \ + && chmod u+x "${HOME:?}/.local/bin/mysudo" \ && true @@ -53,9 +56,11 @@ true \ & sleep 3 \ ) \ && (cd "${WORKDIR:?}/gateleen" && mvn deploy -PuploadStaticFiles) \ - && pkill -TERM java && pkill -INT redis-server \ + && (pkill -INT java || sleep 3 && pkill -TERM java || sleep 3 && pkill -9 java) \ + && pkill -INT redis-server \ && $PKGDEL $PKGS_TO_DEL \ && $PKGCLEAN \ + && sleep 3 \ && printf '\n DONE\n\n' \ && true @@ -63,7 +68,11 @@ true \ ### Run true \ && ip a | grep inet \ - && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf") \ - & java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \ + && (true \ + && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf") \ + & true \ + && cd ~ \ + && java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \ + ) \ && true -- cgit v1.1 From 45326d08584866373db07af9da3ad43acff854ac Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 17 Oct 2023 02:28:31 +0200 Subject: (qemu) add hint how to merge snapshot images --- doc/note/qemu/qemu.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 0abcd1d..994527c 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -21,6 +21,8 @@ Qemu ### Convert raw to qcow2 qemu-img convert -f raw -O qcow2 foo.img foo.qcow2 +### Create Standalone image based on snapshot image + qemu-img convert -O qcow2 derived.qcow2 standalone.qcow2 ## Shrink img -- cgit v1.1 From a1e4785f2a560ccbe3035d781472399e3847aa11 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 17 Oct 2023 03:06:47 +0200 Subject: (qemu) Gateleen add a 'mvn clean' after build --- doc/note/qemu/build-gateleen.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/note/qemu/build-gateleen.txt b/doc/note/qemu/build-gateleen.txt index 5e71c05..9666aa6 100644 --- a/doc/note/qemu/build-gateleen.txt +++ b/doc/note/qemu/build-gateleen.txt @@ -61,6 +61,7 @@ true \ && $PKGDEL $PKGS_TO_DEL \ && $PKGCLEAN \ && sleep 3 \ + && 
(cd "${WORKDIR:?}/gateleen" && mvn clean) \ && printf '\n DONE\n\n' \ && true -- cgit v1.1 From 53485eb70d0be9844baa62010269c545a1fe5e5b Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 18 Oct 2023 15:12:57 +0200 Subject: Moved in some log digging scripts from LuaMurksTools @ 5910d44ea4d3127f805ecda647f639cef9e55084 --- src/main/lua/paisa-logs/DigHoustonLogs.lua | 493 +++++++++++++++++++++++++++++ src/main/lua/paisa-logs/PaisaLogParser.lua | 422 ++++++++++++++++++++++++ 2 files changed, 915 insertions(+) create mode 100644 src/main/lua/paisa-logs/DigHoustonLogs.lua create mode 100644 src/main/lua/paisa-logs/PaisaLogParser.lua diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua new file mode 100644 index 0000000..ab4b46a --- /dev/null +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -0,0 +1,493 @@ +--[====================================================================[ + + projDir="C:\path\to\proj\root" + export LUA_PATH="${projDir:?}/lib/?.lua" + lua -W "${projDir:?}/bin/DigHoustonLogs.lua" + + ]====================================================================] + +local PaisaLogParser = require("PaisaLogParser") +local mod = {} + + +function mod.main() + local that = {} + that.printRaw = true + local parser = PaisaLogParser.newLogParser({ + cls = that, + -- Since 2021-09-24 on prod + patternV1 = "DATE STAGE SERVICE LEVEL FILE - MSG", + onLogEntry = mod.onLogEntry, + }) + parser:tryParseLogs(); +end + + +function mod.onLogEntry( log, that ) + if not mod.isTimeRangeOk(that,log) then return end + if not mod.isLevelOk(that,log) then return end + if not mod.acceptedByMisc(that,log) then return end + if mod.isUselessNoise(that,log) then return end + --if not mod.isNotYetReported(that,log) then return end + mod.debugPrintLogEntry( that, log ) +end + + +function mod.isTimeRangeOk( that, log ) + local pass, drop = true, false + --if log.date < "2022-06-20 08:00:00,000" then return drop end + --if log.date > "2022-06-20 08:30:00,000" then return drop end + return pass +end + + +function mod.isLevelOk( that, log ) + local pass, drop = true, false + --if log.level=="TRACE" then return drop end + --if log.level=="DEBUG" then return drop end + --if log.level=="INFO" then return drop end + return pass +end + + +-- All other crap which is neither categorized nor analyzed. +function mod.acceptedByMisc( that, log ) + local pass, drop = true, false + + -- This is when position from naviation have problems. 
+ if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("t.ch:7022/brox/from/vehicles/.+Connection refused: ") + then return drop end + + -- This is when brox is offline + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find(" http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022") + then return drop end + + -- [SDCISA-8231] (closed) + -- Seen 2022-03-10 PROD + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("http://flanian:8080/flanian/vending/twint/v1/pos/register Problem with backend: You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.") + -- then return drop end + --if log.file=="Forwarder" and log.level=="WARN" + -- and log.raw:find("Failed to read upstream response for 'POST /flanian/vending/twint/v1/pos/register'.+java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.") + -- then return drop end + + -- [SDCISA-8233] + -- Seen 2022-03-10 PROD + --if log.file=="Forwarder" and log.level=="WARN" + -- and log.msg:find("Failed to 'GET /'") + -- and log.raw:find("io.netty.channel.ConnectTimeoutException: connection timed out: rms.post.wlan%-partner.com") + -- then return drop end + + -- This is when lord is offline + -- Seen 2022-06-20 + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find(" http://%w+.pnet.ch:7023/lord/from/vehicles/%d+/vehicle/v1/profile/contact Connection refused: %w+.pnet.ch/[%d.]+:7023") + -- then return drop end + + -- TODO Analyze + -- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) + -- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? + -- LastSeen 2021-09-17 + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the state. Closing server connection for stability reason") + then return drop end + + -- TODO link or create issue + -- HINT: Occurred 774 times within 6 hrs (~2x/min) (2021-09-17_12:00 to 2021-09-17_18:00) + -- Seen 2022-06-20 prod + if log.file=="Utils" and log.level=="ERROR" + and log.msg:find("Exception occurred\n%(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. 
address: __vertx.reply.+, repliedAddress: nsync%-register%-sync") + then return drop end + + -- [SDCISA-9571] + -- TODO remove this filter + if log.file=="BisectClient" and log.level=="WARN" + and log.msg:find("statusCode=503 received for POST /houston/routes/vehicles//eagle/nsync/v1/query-index",0,true) + then return drop end + + -- TODO Open issues for vehicle putting stuff without vehicleId header + -- NOT seen 2022-08-30 prod + --if log.file=="Forwarder" and log.level=="WARN" + -- and log.msg:find("Problem invoking Header functions: unresolvable '{x-vehicleid}' in expression 'garkbit-vending-data-for-vehicle-{x-vehicleid}'",0,true) + -- then return drop end + --if log.file=="Forwarder" and log.level=="WARN" + -- and log.msg:find("Problem invoking Header functions: unresolvable '{x-vehicleid}' in expression 'garkbit-vending-transaction-data-for-vehicle-{x-vehicleid}'",0,true) + -- then return drop end + + -- TODO Analyze + -- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) + if log.file=="Forwarder" and log.level=="WARN" + and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed") + then return drop end + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed") + then return drop end + + -- TODO Analyze + -- FirstSeen 2021-09-17 + -- LastSeen 2022-06-20 + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://pag:8080/pag/user/information/v1/directory/sync/request Timeout") + then return drop end + + -- [SDCISA-9572] pag + -- TODO drop this filter + local hosts = "[8acgilmnpsvwy]+" -- (pag|milliways|vlcn8v) + local ctxts = "[_aegilmopstwy]+" -- (pag|milliways|osm_tiles) + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://"..hosts..":[78]080/"..ctxts.."/.+ Connection was closed") + then return drop end + -- Seen 2022-08-30 prod, 2021-10-25 + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://"..hosts..":8080/"..ctxts.."/.+ Response already written. Not sure about the state. Closing server connection for stability reason") + then return drop end + + -- TODO Analyze. Why do OSM tiles timeout? + -- Seen 2022-06-20 prod, 2021-09-17 + --if log.file=="Forwarder" and log.level=="ERROR" + -- and ( log.msg:find("http://vlcn8v:7080/osm_tiles/%d+/%d+/%d+.png Timeout") -- 2022-06-20 + -- or log.msg:find("http://vlcn8v:7080/osm_tiles/%d+/%d+/%d+.png' Timeout") -- new + -- ) + -- then return drop end + + -- TODO Analyze. + -- Seen 2022-06-20, 2021-09-17 + if log.file=="BisectClient" and log.level=="WARN" + and log.msg:find("statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index") + then return drop end + -- Seen 2022-06-20 PROD + if log.file=="BisectClient" and log.level=="WARN" + and log.msg:find("statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index") + then return drop end + + -- TODO rm filter when fixed + -- [SDCISA-9573] + -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 + if log.file=="BisectClient" and log.level=="WARN" + and log.msg:find("Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. 
Aborting BisectClient") + then return drop end + + -- [SDCISA-9574] + -- TODO rm when resolved + -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 + if log.file=="Utils" and log.level=="ERROR" + and log.msg:find("Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}") + then return drop end + + -- TODO Thought timeout? Can happen. But how often is ok? + local host = "[aghilmostuwy]+" -- (milliways|thought) + -- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) + -- Seen 2022-08-30 prod, 2022-06-20 + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://"..host..":8080/"..host.."/vehicleoperation/recording/v1/.+ Timeout") + then return drop end + + -- TODO Analyze + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout") + then return drop end + + -- TODO Analyze. Why can preflux not handle that? + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("http://preflux:8080/preflux/from/vehicles/%d+/system/status/v1/system/info Timeout") + -- then return drop end + + -- I guess can happen if backend service not available. + -- Seen 2021-10-25 + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("[^ ]+ [^ ]+ http://[^:]+:8080/[^/]+/info Timeout") + -- then return drop end + + -- TODO Analyze. + -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 + if log.file=="RedisQues" and log.level=="WARN" + and log.msg:find("Registration for queue .+ has changed to null") + then return drop end + + -- TODO Why do we have DNS problems within backend itself? + -- Seen 2021-09-17 + --if log.file=="Forwarder" and log.level=="WARN" + -- and log.msg:find("Failed to '[^ ]+ /.+'\n.+SearchDomainUnknownHostException: Search domain query failed. Original hostname: '[^']+' failed to resolve '[^.]+.isa%-houston.svc.cluster.local'") + -- and log.raw:find("Caused by: .+DnsNameResolverTimeoutException: .+ query timed out after 5000 milliseconds") + -- then return drop end + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("http://[^:]+:[78]080/[^ ]+ Search domain query failed. Original hostname: '[^']+' failed to resolve '[^.]+.isa%-houston.svc.cluster.local'") + -- then return drop end + + -- TODO Analyze + -- HINT: Occurred 3 times in 6 hrs (2021-09-17_12:00 to 2021-09-17_18:00) + -- Seen 2022-06-20 + --if log.file=="ContextImpl" and log.level=="ERROR" + -- and log.msg:find("Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.") + -- then return drop end + + -- [SDCISA-7189] + -- Seen 2021-10-21 PROD + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("^[^ ]+ [^ ]+ [^ ]+ Problem with backend: null$") + -- then return drop end + --if log.file=="Forwarder" and log.level=="ERROR" + -- and ( log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ Timeout$") + -- or log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ connection timed out: rms.post.wlan%-partner.com/[^ ]+$") + -- or log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ Response already written. Not sure about the state. 
Closing server connection for stability reason$") + -- ) then return drop end + + -- [SDCISA-7189] + -- Seen 2022-06-20, 2021-10-21 + --if log.file=="Forwarder" and log.level=="ERROR" + -- --and ( log.msg:find("^%%[^ ]{4} [^ ]{32} http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Timeout%s*$") + -- and ( log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Timeout%s*$") + -- or log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Connection was closed$") + -- or log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Response already written. Not sure about the state. Closing server connection for stability reason$") + -- ) + -- then return drop end + ---- Seen 2022-06-20 + --if log.file=="FilePutter" and log.level=="ERROR" + -- and log.msg:find("^Put file failed:\nio.vertx.core.VertxException: Connection was closed$") + -- then return drop end + ---- Seen 2022-06-20 + --if log.file=="EventEmitter" and log.level=="ERROR" + -- and log.msg:find("Exception thrown in event handler.",0,true) + -- and log.raw:find("java.lang.IllegalStateException: Response is closed\n" + -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.checkValid(HttpServerResponseImpl.java:564)\n" + -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.end(HttpServerResponseImpl.java:324)\n" + -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.end(HttpServerResponseImpl.java:313)\n" + -- .."\tat org.swisspush.reststorage.RestStorageHandler.respondWith(RestStorageHandler.java:699)\n" + -- .."\tat org.swisspush.reststorage.RestStorageHandler.lambda$putResource_storeContentsOfDocumentResource$3(RestStorageHandler.java:477)\n" + -- ,90,true) + -- then return drop end + + -- Seen 2022-06-20 prod, 2021-10-21 prod + -- TODO: link (or create) issue + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("^%%[^ ]+ [^ ]+ http://preflux:8080/preflux/preflux/executeTask/host/[^/]+/instance/default/task/DOCKER_PULL .+$") + -- and ( log.msg:find("/DOCKER_PULL Timeout",120,true) + -- or log.msg:find("/DOCKER_PULL Connection was closed",120,true) + -- ) + -- then return drop end + + -- [SDCISA-9578] + -- TODO rm when fixed + -- Seen 2022-08-30 prod, 2022-06-20 prod + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find(" http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected .+$") + and ( false + or log.msg:find(" Connection reset by peer",100,true) + or log.msg:find(" Connection was closed",100,true) + or log.msg:find(" Response already written. Not sure about the state. Closing server connection for stability reason",100,true) + ) + then return drop end + + -- TODO analyze + -- Seen 2022-06-20 prod + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find(" http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed") + then return drop end + + -- Seen 2021-10-25 + -- TODO Analyze? + --if log.file=="Forwarder" and log.level=="ERROR" + -- and log.msg:find("[^ ]+ [^ ]+ http://halfrunt:8080/halfrunt/common/metric/v1/vehicles/%d+ Timeout") + -- then return drop end + + -- Not analyzed yet. 
+    -- Seen 2021-10-25
+    -- NOT Seen 2022-08-30
+    --if log.file=="Forwarder" and log.level=="ERROR"
+    --    and log.msg:find("[^ ]+ [^ ]+ http://eddie%d+.pnet.ch:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d.vehicles ")
+    --    and ( log.msg:find(" Connection reset by peer",120,true)
+    --       or log.msg:find(" Connection was closed",120,true)
+    --    )
+    --    then return drop end
+
+    -- Goddammit, what an annoyingly noisy message this is!
+    -- Seen 2022-06-20 prod, 2021-10-25
+    --if log.file=="Forwarder" and log.level=="ERROR"
+    --    and log.msg:find("Response already written. Not sure about the state. Closing server connection for stability reason",0,true)
+    --    then return drop end
+
+    -- NOT Seen 2022-08-30
+    --if (log.file=="Forwarder"and log.level=="WARN")or(log.file=="LocalHttpServerResponse"and log.level=="ERROR")
+    --    and log.msg:find("non-proper HttpServerResponse occured",0,0)
+    --    --and log.raw:find("java.lang.IllegalStateException: You must set the Content-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.\n\tat org.swisspush.gateleen.core.http.LocalHttpServerResponse.write(LocalHttpServerResponse.java:205")
+    --    then return drop end
+
+    -- Tyro bullshit. Nothing we can do, as tyro is EndOfLife. We have to await its removal.
+    -- Seen 2022-06-20
+    if log.file=="SlicedLoop" and log.level=="WARN"
+        and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.")
+        and log.msg:find("SlicedLoop.EventLoopHogException: /houston/deployment/playbook/v1/.expand=4")
+        then return drop end
+
+    -- TODO analyze
+    -- Seen 2022-06-20
+    if log.file=="SlicedLoop" and log.level=="WARN"
+        and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.")
+        and log.msg:find("SlicedLoop.EventLoopHogException: /houston/from/vehicles/%d+/vehiclelink/status/v1/passengercounting/doors.expand=2")
+        then return drop end
+
+    -- TODO analyze
+    -- Seen 2022-06-20
+    if log.file=="SlicedLoop" and log.level=="WARN"
+        and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.")
+        and log.msg:find("SlicedLoop.EventLoopHogException: /houston/timetable/notification/v1/planningareas.expand=3")
+        then return drop end
+
+    -- TODO analyze
+    -- Seen 2022-06-20 prod
+    if log.file=="SlicedLoop" and log.level=="WARN"
+        and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.")
+        and log.msg:find("SlicedLoop.EventLoopHogException: /houston/vehicles/%d+/vehicle/backup/v1/executions.expand=2")
+        then return drop end
+
+    -- TODO analyze
+    -- Seen 2022-08-30 prod
+    if log.file=="SlicedLoop" and log.level=="WARN"
+        and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.+EventLoopHogException.+"
+            .."/houston/timetable/disruption/v1/areas%?expand=3")
+        then return drop end
+
+    -- TODO analyze
+    -- Seen 2022-06-20 prod
+    --if log.file=="RecursiveRootHandlerBase" and log.level=="ERROR"
+    --    and log.msg:find("Error in result of sub resource 'listeners' Message: Failed to decode: Unrecognized token 'Forbidden': was expecting %(JSON String, Number, Array, Object or token 'null', 'true' or 'false'%)")
+    --    then return drop end
+
+    -- TODO create issue
+    -- Seen 2022-08-30 prod, 2022-06-20 prod
+    if log.file=="ConnectionBase" and log.level=="ERROR"
+        and log.msg:find("invalid version format: {")
+        then return drop end
+
+    -- TODO Analyze
+    -- Seen 2022-08-30 prod
+    if log.file=="NSyncVerticle" and log.level=="ERROR"
+        and log.msg:find("Response%-Exception occurred while placing hook for Index"
+            .." id=[^ ]+"
+            .." 
rootPath=/houston/[cnosty]+/vehicles/%d+/[^ ]+ size=%d+.+VertxException.+ Connection was closed")
+        then return drop end
+
+    -- TODO Analyze
+    -- Seen 2022-08-30 prod
+    if log.file=="HandlerRegistration" and log.level=="ERROR"
+        and log.msg:find("Failed to handleMessage. address: __vertx.reply.%d+.+IllegalStateException:"
+            .." Response is closed")
+        then return drop end
+
+    -- Yet another bullshit msg
+    -- Seen 2022-08-30 prod
+    if log.file=="ContextImpl" and log.level=="ERROR"
+        and log.msg:find("Unhandled exception.+IllegalStateException: Response is closed")
+        then return drop end
+
+    return pass
+end
+
+
+-- Reject all the stuff which I consider to be useless noise.
+function mod.isUselessNoise( that, log )
+    local pass, drop = false, true
+
+    -- Looks pretty useless, as it provides way too few details
+    -- HINT: Occurred 4 times in 6 hrs (2021-09-17_12:00 to 2021-09-17_18:00)
+    -- Seen 2022-08-30 prod, 2022-06-20
+    if log.file=="ConnectionBase" and log.level=="ERROR"
+        and log.msg:find("Connection reset by peer",0,true)
+        then return drop end
+
+    -- Connection timeout because eddie offline
+    -- HINT: (EachOfTheThree) Occurred ~20000 times in 6 hrs (avg 1x per 1sec) (2021-09-17_12:00 to 2021-09-17_18:00)
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("Failed to '[^ ]+ /from%-houston/.+ConnectTimeoutException: connection timed out: eddie")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie.+:7012/from.houston/.+/eagle/.+connection timed out: eddie.+")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie[0-9]+:7012/from.houston/.+/eagle/.+ Timeout")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="WARN"
+        then return drop end
+
+    -- Connection reset/refused because eddie offline
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+AnnotatedConnectException: Connection refused: eddie%d+.+:7012")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection refused: eddie%d+.+:7012")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\njava.io.IOException: Connection reset by peer")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection reset by peer")
+        then return drop end
+
+    -- Yet another EddieNotReachable (!!FATAL!!) error ...
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find(" Connection refused: eddie",0,true)
+        then return drop end
+
+    -- Connection closed because eddie offline
+    if log.file=="BisectClient" and log.level=="ERROR"
+        and log.msg:find("Exception occurred for POST%-request /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index\n.+VertxException: Connection was closed")
+        then return drop end
+
+    -- DNS crap for offline eddies
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+\n.+ Search domain query failed. 
Original hostname: 'eddie%d+' failed to resolve 'eddie%d+%.isa%-houston%.svc%.cluster%.local'")
+        then return drop end
+    -- HINT: Occurred 8219 times in 6 hrs (avg 1x per 2.5sec) (2021-09-17_12:00 to 2021-09-17_18:00)
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Search domain query failed. Original hostname: 'eddie%d+' failed to resolve 'eddie%d+")
+        then return drop end
+    -- HINT: Occurred 781 times in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00)
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("Failed to '[^ ]+ /from.houston/%d+/eagle/.+\n.+SearchDomainUnknownHostException: Search domain query failed. Original hostname: 'eddie%d+' failed to resolve 'eddie%d+")
+        and log.raw:find("Caused by: .+DnsNameResolverTimeoutException: .+ query timed out after 5000 milliseconds")
+        then return drop end
+    -- Seen 2022-06-20 prod, 2021-10-25
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find(" http://%w+:7012/from%-houston/%d+/eagle/nsync/v1/push/.+ Search domain query failed. Original hostname: 'eddie[^']+' failed to resolve 'eddie[%w.-]+'")
+        then return drop end
+    -- Occurred 1 time in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00)
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+UnknownHostException: failed to resolve 'eddie%d+' after %d+ queries")
+        then return drop end
+    -- Occurred 1 time in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00)
+    -- Seen 2022-06-20 prod
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/[^ ]+ failed to resolve 'eddie%d+' after %d+ queries")
+        then return drop end
+
+    -- Some strange connection limit for Trin
+    if log.file=="Forwarder" and log.level=="ERROR"
+        and log.msg:find("http://trin:8080/trin/from/vehicles/%d+/[^ ]+ Connection pool reached max wait queue size of")
+        then return drop end
+    if log.file=="Forwarder" and log.level=="WARN"
+        and log.msg:find("^Failed to 'PUT /trin/from/vehicles/[^ ]+'%s+io.vertx.core.http.ConnectionPoolTooBusyException: Connection pool reached max wait queue size of %d+")
+        then return drop end
+
+    -- No idea what this msg should tell us. Has no details at all.
+ -- Seen 2022-08-30 prod + if log.file=="HttpClientRequestImpl" and log.level=="ERROR" + and log.msg:find("VertxException: Connection was closed", 0, true) + then return drop end + + return pass +end + + +function mod.debugPrintLogEntry( that, log ) + if that.printRaw then + print( log.raw ); + else + log:debugPrint() + end +end + + +mod.main() + diff --git a/src/main/lua/paisa-logs/PaisaLogParser.lua b/src/main/lua/paisa-logs/PaisaLogParser.lua new file mode 100644 index 0000000..638e08c --- /dev/null +++ b/src/main/lua/paisa-logs/PaisaLogParser.lua @@ -0,0 +1,422 @@ + +local exports = {} +local mod = {} +local stderr = io.stderr + + +local LogParse = { -- class + line = nil, + log = nil, +} + + +function exports.newLogParser( config ) + return LogParse:new(nil, config ) +end + + +function LogParse:new(o, config) + if not config or type(config.onLogEntry) ~= "function" then + error( "Arg 'config.onLogEntry' must be a function" ) + end + o = o or {}; + setmetatable(o, self); + self.__index = self; + -- Register callbacks + self.cb_cls = config.cls + self.cb_onLogEntry = config.onLogEntry + self.cb_onEnd = config.onEnd + self.cb_onError = config.onError or function(s) + error(s or "nil") + end + self.cb_onWarn = config.onWarn or function(s) + io.stdout:flush() + warn(s) + end + -- END callbacks + mod.setupParserPattern( o, config ) + return o; +end + + +function mod.setupParserPattern( this, c ) + local inputPat + if c.patternV1 then + inputPat = c.patternV1; -- Use the one from parameter. + else + this.cb_onWarn( "No 'c.patternV1' specified. Fallback to internal obsolete one." ) + inputPat = "DATE POD STAGE SERVICE THREAD LEVEL FILE - MSG" + end + local parts = {} + for part in string.gmatch(inputPat,"[^ ]+") do + table.insert( parts, part ) + end + this.parts = parts +end + + +local function writeStderr(...) + local args = table.pack(...) + for i=1,args.n do + io.stderr:write( args[i] or "nil" ) + end +end + + +function LogParse:tryParseLogs() + while true do + self.line = io.read("l"); + if self.line==nil then -- EOF + self:publishLogEntry(); + break; + end + + --io.write( "\nBUF: ", self.line, "\n\n" ); + --io.flush() + + if self.line:match("%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d ") then + -- Looks like the beginning of a new log entry. + self:initLogEntryFromLine(); + elseif self.line:match("^%s+at [^ ]") then + -- Looks like a line from exception stack + self:appendStacktraceLine(); + elseif self.line:match("^%s*Caused by: ") then + -- Looks like a stacktrace 'Caused by' line + self:appendStacktraceLine(); + elseif self.line:match("^%s+Suppressed: ") then + -- Looks like a stacktrace 'Suppressed: ' line + self:appendStacktraceLine(); + elseif self.line:match("^%\t... (%d+) more$") then + -- Looks like folded stacktrace elements + self:appendStacktraceLine(); + else + -- Probably msg containing newlines. + self:appendLogMsg(); + end + + end +end + + +function LogParse:initLogEntryFromLine() + self:publishLogEntry(); + local log = self:getOrNewLogEntry(); + + -- Try some alternative parsers + mod.parseByPattern( self ) + --if log.date==nil then + -- self:parseOpenshiftServiceLogLine(); + --end + --if log.date==nil then + -- self:parseEagleLogLine(); + --end + --if log.date==nil then + -- self:parseJettyServiceLogLine(); + --end + + if log.date==nil then + self.cb_onWarn("Failed to parse log line:\n\n".. 
self.line .."\n\n", self.cb_cls) + end +end + + +function mod.parseByPattern( this ) + local date, pod, stage, service, thread, level, file, msg, matchr, match + local line = this.line + local log = this:getOrNewLogEntry(); + + -- We can just return on failure. if log is missing, it will report error + -- on caller side. Just ensure that 'date' is nil. + log.date = nil + + local rdPos = 1 + for i,part in ipairs(this.parts) do + if part=="DATE" then + date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d) ", rdPos)() + if not date or date=="" then return end + rdPos = rdPos + date:len() + --stderr:write("date: "..tostring(date).." (rdPos="..tostring(rdPos)..")\n") + elseif part=="STAGE" then + match = line:gmatch( " +[^%s]+", rdPos)() + if not match then return end + stage = match:gmatch("[^%s]+")() + rdPos = rdPos + match:len() + --stderr:write("stage: "..tostring(stage).." (rdPos="..tostring(rdPos)..")\n") + elseif part=="SERVICE" then + match = line:gmatch(" +[^%s]+", rdPos)() + if not match then return end + service = match:gmatch("[^%s]+")() + rdPos = rdPos + match:len() + --stderr:write("service: "..tostring(service).." (rdPos="..tostring(rdPos)..")\n"); + elseif part=="LEVEL" then + match = line:gmatch(" +[^%s]+", rdPos)() + if not match then return end + level = match:gmatch("[^%s]+")() + if not level:find("^[ABCDEFGINORTUW]+$") then -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE) + this.cb_onWarn( "Does not look like a level: "..(level or"nil"), this.cb_cls ) + end + rdPos = rdPos + match:len() + --stderr:write("level: "..tostring(level).." (rdPos="..tostring(rdPos)..")\n"); + elseif part=="FILE" then + match = line:gmatch(" +[^%s]+", rdPos)() + if not match then return end + file = match:gmatch("[^%s]+")() + if file=="WARN" then stderr:write("\n"..tostring(line).."\n\n")error("Doesn't look like a file: "..tostring(file)) end + rdPos = rdPos + match:len() + --stderr:write("file: "..tostring(file).." (rdPos="..tostring(rdPos)..")\n"); + elseif part=="-" then + match = line:gmatch(" +%-", rdPos)() + rdPos = rdPos + match:len(); + --stderr:write("dash (rdPos="..tostring(rdPos)..")\n"); + elseif part=="MSG" then + match = line:gmatch(" +.*$", rdPos)() + if not match then return end + msg = match:gmatch("[^%s].*$")() + rdPos = rdPos + match:len() + --stderr:write("msg: "..tostring(msg).." (rdPos="..tostring(rdPos)..")\n") + elseif part=="POD" then + match = line:gmatch(" +[^%s]+", rdPos)() + if not match then return end + pod = match:gmatch("[^%s]+")() + rdPos = rdPos + match:len() + --stderr:write("pod: "..tostring(pod).." (rdPos="..tostring(rdPos)..")\n") + elseif part=="THREAD" then + match = line:gmatch(" +[^%s]+", rdPos)() + thread = match:gmatch("[^%s]+")() + rdPos = rdPos + match:len() + --stderr:write("thrd: "..tostring(thread).." (rdPos="..tostring(rdPos)..")\n") + end + end + + log.raw = this.line; + log.date = date; + log.pod = pod; + log.stage = stage; + log.service = service; + log.thread = thread; + log.level = level; + log.file = file; + log.msg = msg; +end + + +function LogParse:parseOpenshiftServiceLogLine() + local date, pod, stage, service, thread, level, file, msg + local this = self + local line = this.line + local log = self:getOrNewLogEntry(); + + -- We can just return on failure. if log is missing, it will report error + -- on caller side. Just ensure that 'date' is nil. 
+ log.date = nil + + -- VERSION 3 (Since 2021-09-24 houstonProd) + local rdPos = 1 + -- Date + date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d)", rdPos)() + if not date then return end + rdPos = rdPos + date:len() + -- Pod + pod = line:gmatch(" (%a+)", rdPos )() + if not pod then return end + rdPos = rdPos + pod:len() + -- stage + stage = line:gmatch( " (%a+)", rdPos)() + if not stage then return end + rdPos = rdPos + stage:len() + -- service + service = line:gmatch( " (%a+)", rdPos)() + if not service then return end + rdPos = rdPos + service:len() + -- thread (this only maybe exists) + thread = line:gmatch( " ([%a%d%-]+)", rdPos)() + -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE) + if thread and thread:find("^[ABCDEFGINORTUW]+$") then + thread = nil; -- Does more look like an error level. So do NOT advance + else + rdPos = rdPos + thread:len() + end + -- level + level = line:gmatch( " ([A-Z]+)", rdPos)() + if not level then return end + rdPos = rdPos + level:len() + -- file + file = line:gmatch(" ([^%s]+)", rdPos)() + if not file then return end + rdPos = rdPos + file:len() + -- msg + msg = line:gmatch(" %- (.*)", rdPos)() + if not msg then return end + rdPos = rdPos + msg:len() + + -- VERSION 2 (Since 2021-09-24 prefluxInt) + --local rdPos = 1 + ---- Date + --date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d)", rdPos)() + --if not date then return end + --rdPos = rdPos + date:len() + ---- Pod + --pod = line:gmatch(" (%a+)", rdPos )() + --if not pod then return end + --rdPos = rdPos + pod:len() + ---- stage + --stage = line:gmatch( " (%a+)", rdPos)() + --if not stage then return end + --rdPos = rdPos + stage:len() + ---- service + --service = line:gmatch( " (%a+)", rdPos)() + --if not service then return end + --rdPos = rdPos + service:len() + ---- thread (this only maybe exists) + --thread = line:gmatch( " ([%a%d%-]+)", rdPos)() + ---- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE) + --if thread and thread:find("^[ABCDEFGINORTUW]+$") then + -- thread = nil; -- Does more look like an error level. So do NOT advance + --else + -- rdPos = rdPos + thread:len() + --end + ---- level + --level = line:gmatch( " ([A-Z]+)", rdPos)() + --if not level then return end + --rdPos = rdPos + level:len() + ---- file + --file = line:gmatch(" ([^%s]+)", rdPos)() + --if not file then return end + --rdPos = rdPos + file:len() + ---- msg + --msg = line:gmatch(" %- (.*)", rdPos)() + --if not msg then return end + --rdPos = rdPos + msg:len() + + log.raw = self.line; + log.date = date; + log.pod = pod; + log.stage = stage; + log.service = service; + log.thread = thread; + log.level = level; + log.file = file; + log.msg = msg; +end + + +function LogParse:parseEagleLogLine() + local log = self:getOrNewLogEntry(); + local date, stage, service, level, file, msg = self.line:gmatch("" + .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime + .." (%a+)" -- stage + .." (%a+)" -- service + .." (%a+)" -- level + .." ([^%s]+)" -- file + .." %- (.*)" -- msg + )(); + local pod = service; -- just 'mock' it + log.raw = self.line; + log.date = date; + log.service = service; + log.pod = pod; + log.stage = stage; + log.level = level; + log.file = file; + log.msg = msg; +end + + +function LogParse:parseJettyServiceLogLine() + local log = self:getOrNewLogEntry(); + local date, pod, stage, service, level, file, msg = self.line:gmatch("" + .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime + .." (%S+)" -- pod (aka container) + .." (%a+)" -- stage + .." 
(%a+)" -- service + .." (%a+)" -- level + .." ([^%s]+)" -- file + .." %- (.*)" -- msg + )(); + log.raw = self.line; + log.date = date; + log.pod = pod; + log.stage = stage; + log.service = service; + log.level = level; + log.file = file; + log.msg = msg; +end + + +function LogParse:appendLogMsg() + local log = self:getOrNewLogEntry() + log.msg = log.msg or ""; + log.raw = log.raw or ""; + + log.msg = log.msg .."\n".. self.line; + -- Also append to raw to have the complete entry there. + log.raw = log.raw .."\n".. self.line; +end + + +function LogParse:appendStacktraceLine() + local log = self:getOrNewLogEntry() + log.stack = log.stack or ""; + log.stack = log.stack .."\n".. self.line; + -- Also append to raw to have the complete entry there. + log.raw = log.raw .."\n".. self.line; +end + + +function LogParse:publishLogEntry() + local log = self.log + if not log then + return -- nothing to do + end + if not log.raw then + -- WhatTheHeck?!? + local msg = "InternalError: Collected log unexpectedly empty" + self.cb_onError(msg, self.cb_cls) + error(msg); return + end + self.log = nil; -- Mark as consumed + self.cb_onLogEntry( log, self.cb_cls ) +end + + +function LogParse:getOrNewLogEntry() + self.log = self.log or LogEntry:new(nil) + return self.log +end + + +LogEntry = { + raw, + date, + service, + stack, +} + + +function LogEntry:new(o) + o = o or {}; + setmetatable(o, self); + self.__index = self; + return o; +end + + +function LogEntry:debugPrint() + print( "+- PUBLISH ------------------------------------------------------------" ); + print( "| date ---> ", self.date or "nil" ); + print( "| pod ----> ", self.pod or "nil" ); + print( "| service > ", self.service or "nil" ); + print( "| stage --> ", self.stage or "nil" ); + print( "| thread -> ", self.thread or "nil" ); + print( "| level --> ", self.level or "nil" ); + print( "| file ---> ", self.file or "nil" ); + print( "| msg ----> ", self.msg or "nil" ); + print( "| " ) + io.write( "| RAW: ", self.raw or "nil", "\n" ); + print( "`--------------------" ); +end + + +return exports + -- cgit v1.1 From 1a6908b485027a05eb8bf795c02506b2789de540 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 18 Oct 2023 16:07:52 +0200 Subject: Some log digging --- src/main/lua/paisa-logs/DigHoustonLogs.lua | 43 ++++++++++++++++++++++-------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index ab4b46a..89d0b5b 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -1,8 +1,8 @@ --[====================================================================[ - projDir="C:\path\to\proj\root" - export LUA_PATH="${projDir:?}/lib/?.lua" - lua -W "${projDir:?}/bin/DigHoustonLogs.lua" + projDir='C:\path\to\proj\root' + export LUA_PATH="${projDir:?}/src/main/lua/paisa-logs/?.lua" + lua -W "${projDir:?}/src/main/lua/paisa-logs/DigHoustonLogs.lua" ]====================================================================] @@ -25,27 +25,28 @@ end function mod.onLogEntry( log, that ) if not mod.isTimeRangeOk(that,log) then return end - if not mod.isLevelOk(that,log) then return end - if not mod.acceptedByMisc(that,log) then return end - if mod.isUselessNoise(that,log) then return end + --if not mod.isLevelOk(that,log) then return end + --if not mod.acceptedByMisc(that,log) then return end + --if mod.isUselessNoise(that,log) then return end --if not mod.isNotYetReported(that,log) then return end 
mod.debugPrintLogEntry( that, log ) end function mod.isTimeRangeOk( that, log ) + -- At 2023-10-18 I observed that houston logs now seem to use a "T" in the datetime. local pass, drop = true, false - --if log.date < "2022-06-20 08:00:00,000" then return drop end - --if log.date > "2022-06-20 08:30:00,000" then return drop end + if log.date <= "2023-10-18T03:00:00,000" then return drop end + if log.date > "2023-10-18T15:00:00,000" then return drop end return pass end function mod.isLevelOk( that, log ) local pass, drop = true, false - --if log.level=="TRACE" then return drop end - --if log.level=="DEBUG" then return drop end - --if log.level=="INFO" then return drop end + if log.level=="TRACE" then return drop end + if log.level=="DEBUG" then return drop end + if log.level=="INFO" then return drop end return pass end @@ -206,6 +207,26 @@ function mod.acceptedByMisc( that, log ) and log.msg:find("Registration for queue .+ has changed to null") then return drop end + -- Reported: SDCISA-10973 + -- Seen: 2023-10-18 prod. + if log.file=="HttpClientRequestImpl" and log.level=="ERROR" + and log.msg:find("The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089") + then return drop end + + -- Reported: TODO + -- Seen: 2023-10-18 prod. + if log.file=="Utils" and log.level=="ERROR" + and log.msg:find("Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for a reply. address: __vertx.reply.[0-9]+, repliedAddress: nsync%-re") + then return drop end + + if log.file=="HttpHeaderUtil" and log.level=="ERROR" + and log.msg:find("Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/internal/security/login_state") + then return drop end + + if log.file=="Forwarder" and log.level=="ERROR" + and log.msg:find("[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012") + then return drop end + -- TODO Why do we have DNS problems within backend itself? 
-- Seen 2021-09-17 --if log.file=="Forwarder" and log.level=="WARN" -- cgit v1.1 From e8217eef7e27fdc5e2187a7d5e72109395f79be9 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 18 Oct 2023 17:32:54 +0200 Subject: Add some links --- doc/note/links/links.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 7e08812..bf28fb1 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -95,6 +95,9 @@ Links (Aka argument amplifiers) - "https://jira.post.ch/browse/SDCISA-5624" - "https://gitit.post.ch/projects/ISA/repos/halfrunt/pull-requests/27/overview?commentId=105541" +## Java dropping exception stack traces is a "feature" +- [](https://stackoverflow.com/a/3010106/4415884) + ## Please don't never not avoid nevative (un)logic - "https://schneide.blog/tag/boolean-statements/" @@ -444,6 +447,7 @@ Links (Aka argument amplifiers) ## FileLogging is a MUST have (kibana is bullsh**) - [example](https://jira.post.ch/browse/SDCISA-8382?focusedCommentId=1554435&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1554435) +- [example](https://jira.post.ch/browse/SDCISA-13655) - [warning](https://jira.post.ch/browse/SDCISA-7230?focusedCommentId=1550476&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1550476) ## Automatic variable dynamic version are evil -- cgit v1.1 From 58b312c27ae253274b994fd6f2b08a2d4cb3df82 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 19 Oct 2023 00:26:06 +0200 Subject: (LogDigger) Refactor to declarative style (not tested yet) --- src/main/lua/paisa-logs/DigHoustonLogs.lua | 670 ++++++++--------------------- src/main/lua/paisa-logs/PaisaLogParser.lua | 7 + 2 files changed, 196 insertions(+), 481 deletions(-) diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index 89d0b5b..c587d71 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -1,3 +1,4 @@ +#!/usr/bin/env lua --[====================================================================[ projDir='C:\path\to\proj\root' @@ -7,508 +8,215 @@ ]====================================================================] local PaisaLogParser = require("PaisaLogParser") -local mod = {} +local normalizeIsoDateTime = require("PaisaLogParser").normalizeIsoDateTime +local main, onLogEntry, isWorthToPrint, loadFilters, initFilters -function mod.main() - local that = {} - that.printRaw = true + +local function main() + local that = { + logPattern = "DATE STAGE SERVICE LEVEL FILE - MSG", -- Since 2021-09-24 on prod + printRaw = true, + filters = false, + } + loadFilters(that) + initFilters(that) local parser = PaisaLogParser.newLogParser({ cls = that, - -- Since 2021-09-24 on prod - patternV1 = "DATE STAGE SERVICE LEVEL FILE - MSG", - onLogEntry = mod.onLogEntry, + patternV1 = that.logPattern, + onLogEntry = onLogEntry, }) parser:tryParseLogs(); end -function mod.onLogEntry( log, that ) - if not mod.isTimeRangeOk(that,log) then return end - --if not mod.isLevelOk(that,log) then return end - --if not mod.acceptedByMisc(that,log) then return end - --if mod.isUselessNoise(that,log) then return end - --if not mod.isNotYetReported(that,log) then return end - mod.debugPrintLogEntry( that, log ) +local function loadFilters( that ) + assert(not that.filters) + that.filters = { + + { action = "drop", beforeDate = "2023-10-18 03:00:00.000" }, + { action = "drop", afterDate = 
"2023-10-18 15:00:00.000" }, + + { action = "drop", level = "TRACE" }, + { action = "drop", level = "DEBUG" }, + { action = "drop", level = "INFO" }, + + ---- [SDCISA-9572] pag + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, + + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, + + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, + + ---- TODO Analyze + ---- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) + ---- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? + ---- Seen: 2021-09-17 + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" + -- .." state. Closing server connection for stability reason", }, + + ---- TODO link or create issue + ---- HINT: Occurred 774 times within 6 hrs (~2x/min) (2021-09-17_12:00 to 2021-09-17_18:00) + ---- Seen: 2022-06-20 prod + --{ action = "drop", file = "Utils", level = "ERROR", + -- msgPat = "Exception occurred\n%(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address:" + -- .." __vertx.reply.+, repliedAddress: nsync%-register%-sync", }, + + ---- TODO Analyze + ---- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) + --{ action = "drop", file = "Forwarder", level = "WARN", + -- msgPat = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, + + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, + + ---- TODO Analyze + ---- Seen: 2021-09-17, ..., 2022-06-20 + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, + + ---- Seen 2021-10-25, 2022-08-30 prod + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" + -- .." state. Closing server connection for stability reason", }, + + ---- TODO Analyze. + ---- Seen 2021-09-17, 2022-06-20 + --{ action = "drop", file = "BisectClient", level = "WARN", + -- msgPat = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + ---- Seen 2022-06-20 prod + --{ action = "drop", file = "BisectClient", level = "WARN", + -- msgPat = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + ---- TODO rm filter when fixed + ---- Reported: SDCISA-9573 + ---- Seen: 2022-08-30 prod, 2022-06-20, 2021-09-17 + --{ action = "drop", file = "BisectClient", level = "WARN", + -- msgPat = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, + + ---- TODO Thought timeout? Can happen. But how often is ok? 
+ ---- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) + ---- Seen 2022-06-20, 2022-08-30 prod + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, + + ---- Reported: SDCISA-9574 + ---- TODO rm when resolved + ---- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod, + --{ action = "drop", file = "Utils", level = "ERROR", + -- msgPat = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, + + ---- TODO Analyze + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, + + ---- TODO Analyze. + ---- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 + --{ action = "drop", file = "RedisQues", level = "WARN", + -- msgPat = "Registration for queue .+ has changed to null", }, + + ---- Reported: SDCISA-10973 + ---- Seen: 2023-10-18 prod. + --{ action = "drop", file = "HttpClientRequestImpl", level = "ERROR", + -- msgPat = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" + -- .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, + + ---- Reported: TODO + ---- Seen: 2023-10-18 prod. + --{ action = "drop", file = "Utils", level = "ERROR", + -- msgPat = "Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for" + -- .." a reply. address: __vertx.reply.[0-9]+, repliedAddress: nsync%-re", }, + + ---- Seen: 2023-10-18 prod + --{ action = "drop", file = "HttpHeaderUtil", level = "ERROR", + -- msgPat = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/internal/security/login_state", }, + + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" + -- .."%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been" + -- .." exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" + -- .."%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012", }, + + ---- Reported: SDCISA-9578 + ---- TODO rm when fixed + ---- Seen 2022-08-30 prod, 2022-06-20 prod + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- .." Connection reset by peer", }, + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- .." Connection was closed", }, + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- .." Response already written. Not sure about the state. Closing server connection for stability reason", }, + + ---- TODO analyze + ---- Seen 2022-06-20 prod + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, + + } end -function mod.isTimeRangeOk( that, log ) - -- At 2023-10-18 I observed that houston logs now seem to use a "T" in the datetime. 
- local pass, drop = true, false - if log.date <= "2023-10-18T03:00:00,000" then return drop end - if log.date > "2023-10-18T15:00:00,000" then return drop end - return pass +local function initFilters( that ) + for iF = 1, #(that.filters) do + local descr = that.filters[iF] + local beforeDate, afterDate = descr.beforeDate, descr.afterDate + local file, level, msgPat = descr.file, descr.level, descr.msgPat + local filter = { action = descr.action, matches = false, } + filter.matches = function( that, log ) + if file and file ~= log.file then return false end + if level and level ~= log.level then return false end + local logDate = normalizeIsoDateTime(log.date) + if logDate < beforeDate then return false end + if logDate >= afterDate then return false end + if msgPat and not log.msg:find(msgPat) then return false end + return true + end + that.filters[iF] = filter + end end -function mod.isLevelOk( that, log ) - local pass, drop = true, false - if log.level=="TRACE" then return drop end - if log.level=="DEBUG" then return drop end - if log.level=="INFO" then return drop end - return pass +local function onLogEntry( log, that ) + if isWorthToPrint(that, log) then + if that.printRaw then + print(log.raw) + else + log:debugPrint() + end + end end --- All other crap which is neither categorized nor analyzed. -function mod.acceptedByMisc( that, log ) +local function isWorthToPrint( that, log ) local pass, drop = true, false - - -- This is when position from naviation have problems. - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("t.ch:7022/brox/from/vehicles/.+Connection refused: ") - then return drop end - - -- This is when brox is offline - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find(" http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022") - then return drop end - - -- [SDCISA-8231] (closed) - -- Seen 2022-03-10 PROD - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("http://flanian:8080/flanian/vending/twint/v1/pos/register Problem with backend: You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.") - -- then return drop end - --if log.file=="Forwarder" and log.level=="WARN" - -- and log.raw:find("Failed to read upstream response for 'POST /flanian/vending/twint/v1/pos/register'.+java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.") - -- then return drop end - - -- [SDCISA-8233] - -- Seen 2022-03-10 PROD - --if log.file=="Forwarder" and log.level=="WARN" - -- and log.msg:find("Failed to 'GET /'") - -- and log.raw:find("io.netty.channel.ConnectTimeoutException: connection timed out: rms.post.wlan%-partner.com") - -- then return drop end - - -- This is when lord is offline - -- Seen 2022-06-20 - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find(" http://%w+.pnet.ch:7023/lord/from/vehicles/%d+/vehicle/v1/profile/contact Connection refused: %w+.pnet.ch/[%d.]+:7023") - -- then return drop end - - -- TODO Analyze - -- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) - -- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? - -- LastSeen 2021-09-17 - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. 
Not sure about the state. Closing server connection for stability reason") - then return drop end - - -- TODO link or create issue - -- HINT: Occurred 774 times within 6 hrs (~2x/min) (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-06-20 prod - if log.file=="Utils" and log.level=="ERROR" - and log.msg:find("Exception occurred\n%(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address: __vertx.reply.+, repliedAddress: nsync%-register%-sync") - then return drop end - - -- [SDCISA-9571] - -- TODO remove this filter - if log.file=="BisectClient" and log.level=="WARN" - and log.msg:find("statusCode=503 received for POST /houston/routes/vehicles//eagle/nsync/v1/query-index",0,true) - then return drop end - - -- TODO Open issues for vehicle putting stuff without vehicleId header - -- NOT seen 2022-08-30 prod - --if log.file=="Forwarder" and log.level=="WARN" - -- and log.msg:find("Problem invoking Header functions: unresolvable '{x-vehicleid}' in expression 'garkbit-vending-data-for-vehicle-{x-vehicleid}'",0,true) - -- then return drop end - --if log.file=="Forwarder" and log.level=="WARN" - -- and log.msg:find("Problem invoking Header functions: unresolvable '{x-vehicleid}' in expression 'garkbit-vending-transaction-data-for-vehicle-{x-vehicleid}'",0,true) - -- then return drop end - - -- TODO Analyze - -- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed") - then return drop end - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed") - then return drop end - - -- TODO Analyze - -- FirstSeen 2021-09-17 - -- LastSeen 2022-06-20 - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://pag:8080/pag/user/information/v1/directory/sync/request Timeout") - then return drop end - - -- [SDCISA-9572] pag - -- TODO drop this filter - local hosts = "[8acgilmnpsvwy]+" -- (pag|milliways|vlcn8v) - local ctxts = "[_aegilmopstwy]+" -- (pag|milliways|osm_tiles) - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://"..hosts..":[78]080/"..ctxts.."/.+ Connection was closed") - then return drop end - -- Seen 2022-08-30 prod, 2021-10-25 - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://"..hosts..":8080/"..ctxts.."/.+ Response already written. Not sure about the state. Closing server connection for stability reason") - then return drop end - - -- TODO Analyze. Why do OSM tiles timeout? - -- Seen 2022-06-20 prod, 2021-09-17 - --if log.file=="Forwarder" and log.level=="ERROR" - -- and ( log.msg:find("http://vlcn8v:7080/osm_tiles/%d+/%d+/%d+.png Timeout") -- 2022-06-20 - -- or log.msg:find("http://vlcn8v:7080/osm_tiles/%d+/%d+/%d+.png' Timeout") -- new - -- ) - -- then return drop end - - -- TODO Analyze. 
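
HINT: All these msg:find() checks use Lua patterns, not regexes. A "%" escapes magic characters ("%-" is a literal dash, "%%" a literal percent sign), "%d+" matches one or more digits, and passing true as the fourth find() argument switches to plain substring search. A tiny self-contained check; the sample message here is made up for illustration, not taken from real logs:

    local msg = "http://eddie42:7012/from-houston/4711/eagle/some/path Connection was closed"
    -- "%-" = literal "-", "%d+" = digits, ".+" = any non-empty run:
    assert(msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed"))
    -- fourth arg "true" = no pattern magic at all:
    assert(msg:find("Connection was closed", 1, true))
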
- -- Seen 2022-06-20, 2021-09-17 - if log.file=="BisectClient" and log.level=="WARN" - and log.msg:find("statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index") - then return drop end - -- Seen 2022-06-20 PROD - if log.file=="BisectClient" and log.level=="WARN" - and log.msg:find("statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index") - then return drop end - - -- TODO rm filter when fixed - -- [SDCISA-9573] - -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 - if log.file=="BisectClient" and log.level=="WARN" - and log.msg:find("Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient") - then return drop end - - -- [SDCISA-9574] - -- TODO rm when resolved - -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 - if log.file=="Utils" and log.level=="ERROR" - and log.msg:find("Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}") - then return drop end - - -- TODO Thought timeout? Can happen. But how often is ok? - local host = "[aghilmostuwy]+" -- (milliways|thought) - -- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-08-30 prod, 2022-06-20 - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://"..host..":8080/"..host.."/vehicleoperation/recording/v1/.+ Timeout") - then return drop end - - -- TODO Analyze - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout") - then return drop end - - -- TODO Analyze. Why can preflux not handle that? - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("http://preflux:8080/preflux/from/vehicles/%d+/system/status/v1/system/info Timeout") - -- then return drop end - - -- I guess can happen if backend service not available. - -- Seen 2021-10-25 - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("[^ ]+ [^ ]+ http://[^:]+:8080/[^/]+/info Timeout") - -- then return drop end - - -- TODO Analyze. - -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 - if log.file=="RedisQues" and log.level=="WARN" - and log.msg:find("Registration for queue .+ has changed to null") - then return drop end - - -- Reported: SDCISA-10973 - -- Seen: 2023-10-18 prod. - if log.file=="HttpClientRequestImpl" and log.level=="ERROR" - and log.msg:find("The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089") - then return drop end - - -- Reported: TODO - -- Seen: 2023-10-18 prod. - if log.file=="Utils" and log.level=="ERROR" - and log.msg:find("Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for a reply. 
address: __vertx.reply.[0-9]+, repliedAddress: nsync%-re") - then return drop end - - if log.file=="HttpHeaderUtil" and log.level=="ERROR" - and log.msg:find("Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/internal/security/login_state") - then return drop end - - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012") - then return drop end - - -- TODO Why do we have DNS problems within backend itself? - -- Seen 2021-09-17 - --if log.file=="Forwarder" and log.level=="WARN" - -- and log.msg:find("Failed to '[^ ]+ /.+'\n.+SearchDomainUnknownHostException: Search domain query failed. Original hostname: '[^']+' failed to resolve '[^.]+.isa%-houston.svc.cluster.local'") - -- and log.raw:find("Caused by: .+DnsNameResolverTimeoutException: .+ query timed out after 5000 milliseconds") - -- then return drop end - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("http://[^:]+:[78]080/[^ ]+ Search domain query failed. Original hostname: '[^']+' failed to resolve '[^.]+.isa%-houston.svc.cluster.local'") - -- then return drop end - - -- TODO Analyze - -- HINT: Occurred 3 times in 6 hrs (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-06-20 - --if log.file=="ContextImpl" and log.level=="ERROR" - -- and log.msg:find("Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.") - -- then return drop end - - -- [SDCISA-7189] - -- Seen 2021-10-21 PROD - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("^[^ ]+ [^ ]+ [^ ]+ Problem with backend: null$") - -- then return drop end - --if log.file=="Forwarder" and log.level=="ERROR" - -- and ( log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ Timeout$") - -- or log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ connection timed out: rms.post.wlan%-partner.com/[^ ]+$") - -- or log.msg:find("^[^ ]+ [^ ]+ http://rms.post.wlan%-partner.com:80/ Response already written. Not sure about the state. Closing server connection for stability reason$") - -- ) then return drop end - - -- [SDCISA-7189] - -- Seen 2022-06-20, 2021-10-21 - --if log.file=="Forwarder" and log.level=="ERROR" - -- --and ( log.msg:find("^%%[^ ]{4} [^ ]{32} http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Timeout%s*$") - -- and ( log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Timeout%s*$") - -- or log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Connection was closed$") - -- or log.msg:find("^%%[^ ]+ [^ ]+ http://localhost:9089/houston/vehicles/[^/]+/vehicle/backup/v1/executions/[^/]+/backup.zip Response already written. Not sure about the state. 
Closing server connection for stability reason$") - -- ) - -- then return drop end - ---- Seen 2022-06-20 - --if log.file=="FilePutter" and log.level=="ERROR" - -- and log.msg:find("^Put file failed:\nio.vertx.core.VertxException: Connection was closed$") - -- then return drop end - ---- Seen 2022-06-20 - --if log.file=="EventEmitter" and log.level=="ERROR" - -- and log.msg:find("Exception thrown in event handler.",0,true) - -- and log.raw:find("java.lang.IllegalStateException: Response is closed\n" - -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.checkValid(HttpServerResponseImpl.java:564)\n" - -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.end(HttpServerResponseImpl.java:324)\n" - -- .."\tat io.vertx.core.http.impl.HttpServerResponseImpl.end(HttpServerResponseImpl.java:313)\n" - -- .."\tat org.swisspush.reststorage.RestStorageHandler.respondWith(RestStorageHandler.java:699)\n" - -- .."\tat org.swisspush.reststorage.RestStorageHandler.lambda$putResource_storeContentsOfDocumentResource$3(RestStorageHandler.java:477)\n" - -- ,90,true) - -- then return drop end - - -- Seen 2022-06-20 prod, 2021-10-21 prod - -- TODO: link (or create) issue - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("^%%[^ ]+ [^ ]+ http://preflux:8080/preflux/preflux/executeTask/host/[^/]+/instance/default/task/DOCKER_PULL .+$") - -- and ( log.msg:find("/DOCKER_PULL Timeout",120,true) - -- or log.msg:find("/DOCKER_PULL Connection was closed",120,true) - -- ) - -- then return drop end - - -- [SDCISA-9578] - -- TODO rm when fixed - -- Seen 2022-08-30 prod, 2022-06-20 prod - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find(" http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected .+$") - and ( false - or log.msg:find(" Connection reset by peer",100,true) - or log.msg:find(" Connection was closed",100,true) - or log.msg:find(" Response already written. Not sure about the state. Closing server connection for stability reason",100,true) - ) - then return drop end - - -- TODO analyze - -- Seen 2022-06-20 prod - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find(" http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed") - then return drop end - - -- Seen 2021-10-25 - -- TODO Analyze? - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("[^ ]+ [^ ]+ http://halfrunt:8080/halfrunt/common/metric/v1/vehicles/%d+ Timeout") - -- then return drop end - - -- Not analyzed yet. - -- Seen 2021-10-25 - -- NOT Seen 2022-08-30 - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("[^ ]+ [^ ]+ http://eddie%d+.pnet.ch:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d.vehicles ") - -- and ( log.msg:find(" Connection reset by peer",120,true) - -- or log.msg:find(" Connection was closed",120,true) - -- ) - -- then return drop end - - -- Gopfrtechu isch ds e schiissig närvegi mäudig! - -- Seen 2022-06-20 prod, 2021-10-25 - --if log.file=="Forwarder" and log.level=="ERROR" - -- and log.msg:find("Response already written. Not sure about the state. 
Closing server connection for stability reason",0,true) - -- then return drop end - - -- NOT Seen 2022-08-30 - --if (log.file=="Forwarder"and log.level=="WARN")or(log.file=="LocalHttpServerResponse"and log.level=="ERROR") - -- and log.msg:find("non-proper HttpServerResponse occured",0,0) - -- --and log.raw:find("java.lang.IllegalStateException: You must set the Content-Length header to be the total size of the message body BEFORE sending any data if you are not using HTTP chunked encoding.\n\tat org.swisspush.gateleen.core.http.LocalHttpServerResponse.write(LocalHttpServerResponse.java:205") - -- then return drop end - - -- Tyro bullshit. Nothing we could do as tyro is EndOfLife. We have to await his removal. - -- Seen 2022-06-20 - if log.file=="SlicedLoop" and log.level=="WARN" - and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.") - and log.msg:find("SlicedLoop.EventLoopHogException: /houston/deployment/playbook/v1/.expand=4") - then return drop end - - -- TODO analyze - -- Seen 2022-06-20 - if log.file=="SlicedLoop" and log.level=="WARN" - and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.") - and log.msg:find("SlicedLoop.EventLoopHogException: /houston/from/vehicles/%d+/vehiclelink/status/v1/passengercounting/doors.expand=2") - then return drop end - - -- TODO analyze - -- Seen 2022-06-20 - if log.file=="SlicedLoop" and log.level=="WARN" - and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.") - and log.msg:find("SlicedLoop.EventLoopHogException: /houston/timetable/notification/v1/planningareas.expand=3") - then return drop end - - -- TODO analyze - -- Seen 2022-06-20 prod - if log.file=="SlicedLoop" and log.level=="WARN" - and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.") - and log.msg:find("SlicedLoop.EventLoopHogException: /houston/vehicles/%d+/vehicle/backup/v1/executions.expand=2") - then return drop end - - -- TODO analyze - -- Seen 2022-08-30 prod - if log.file=="SlicedLoop" and log.level=="WARN" - and log.msg:find("Task i=%d+ blocked event%-loop for %d+.%d+ ms.+EventLoopHogException.+" - .."/houston/timetable/disruption/v1/areas%?expand=3") - then return drop end - - -- TODO analyze - -- Seen 2022-06-20 prod - --if log.file=="RecursiveRootHandlerBase" and log.level=="ERROR" - -- and log.msg:find("Error in result of sub resource 'listeners' Message: Failed to decode: Unrecognized token 'Forbidden': was expecting %(JSON String, Number, Array, Object or token 'null', 'true' or 'false'%)") - -- then return drop end - - -- TODO create issue - -- Seen 2022-08-30 prod, 2022-06-20 prod - if log.file=="ConnectionBase" and log.level=="ERROR" - and log.msg:find("invalid version format: {") - then return drop end - - -- TODO Analyze - -- Seen 2022-08-30 prod - if log.file=="NSyncVerticle" and log.level=="ERROR" - and log.msg:find("Response%-Exception occurred while placing hook for Index" - .." id=[^ ]+" - .." rootPath=/houston/[cnosty]+/vehicles/%d+/[^ ]+ size=%d+.+VertxException.+ Connection was closed") - then return drop end - - -- TODO Analyze - -- Seen 2022-08-30 prod - if log.file=="HandlerRegistration" and log.level=="ERROR" - and log.msg:find("Failed to handleMessage. address: __vertx.reply.%d+.+IllegalStateException:" - .." 
Response is closed") - then return drop end - - -- Yet another bullshit msg - -- Seen 2022-08-30 prod - if log.file=="ContextImpl" and log.level=="ERROR" - and log.msg:find("Unhandled exception.+IllegalStateException: Response is closed") - then return drop end - - return pass -end - - --- Reject all the stuff which I consider to be useless noise. -function mod.isUselessNoise( that, log ) - local pass, drop = false, true - - -- Looks pretty useless as provided ways too few details - -- HINT: Occurred 4 times in 6 hrs (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-08-30 prod, 2022-06-20 - if log.file=="ConnectionBase" and log.level=="ERROR" - and log.msg:find("Connection reset by peer",0,true) - then return drop end - - -- Connection timeout because eddie offline - -- HINT: (EachOfTheThree) Occurred ~20000 times in 6 hrs (avg 1x per 1sec) (2021-09-17_12:00 to 2021-09-17_18:00) - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from%-houston/.+ConnectTimeoutException: connection timed out: eddie") - then return drop end - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie.+:7012/from.houston/.+/eagle/.+connection timed out: eddie.+") - then return drop end - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie[0-9]+:7012/from.houston/.+/eagle/.+ Timeout") - then return drop end - if log.file=="Forwarder" and log.level=="WARN" - then return drop end - - -- Connection reset/refused because eddie offline - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+AnnotatedConnectException: Connection refused: eddie%d+.+:7012") - then return drop end - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection refused: eddie%d+.+:7012") - then return drop end - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\njava.io.IOException: Connection reset by peer") - then return drop end - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection reset by peer") - then return drop end - - -- Yet another EddieNotReachable (!!FATAL!!) error ... - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find(" Connection refused: eddie",0,true) - then return drop end - - -- Connection Close because eddie offline - if log.file=="BisectClient" and log.level=="ERROR" - and log.msg:find("Exception occurred for POST%-request /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index\n.+VertxException: Connection was closed") - then return drop end - - -- DNS crap for offline eddies - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+\n.+ Search domain query failed. Original hostname: 'eddie%d+' failed to resolve 'eddie%d+%.isa%-houston%.svc%.cluster%.local'") - then return drop end - -- HINT: Occurred 8219 times in 6 hrs (avg 1x per 2.5sec) (2021-09-17_12:00 to 2021-09-17_18:00) - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Search domain query failed. 
Original hostname: 'eddie%d+' failed to resolve 'eddie%d+") - then return drop end - -- HINT: Occurred 781 times in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00) - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from.houston/%d+/eagle/.+\n.+SearchDomainUnknownHostException: Search domain query failed. Original hostname: 'eddie%d+' failed to resolve 'eddie%d+") - and log.raw:find("Caused by: .+DnsNameResolverTimeoutException: .+ query timed out after 5000 milliseconds") - then return drop end - -- Seen 2022-06-20 prod, 2021-10-25 - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find(" http://%w+:7012/from%-houston/%d+/eagle/nsync/v1/push/.+ Search domain query failed. Original hostname: 'eddie[^']+' failed to resolve 'eddie[%w.-]+'") - then return drop end - -- Occurred 1 times in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00) - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+UnknownHostException: failed to resolve 'eddie%d+' after %d+ queries") - then return drop end - -- Occurred 1 times in 6 hrs (avg _x per _sec) (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-06-20 prod - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://eddie%d+:7012/from%-houston/%d+/eagle/[^ ]+ failed to resolve 'eddie%d+' after %d+ queries") - then return drop end - - -- Some Strange connection limit for Trin - if log.file=="Forwarder" and log.level=="ERROR" - and log.msg:find("http://trin:8080/trin/from/vehicles/%d+/[^ ]+ Connection pool reached max wait queue size of") - then return drop end - if log.file=="Forwarder" and log.level=="WARN" - and log.msg:find("^Failed to 'PUT /trin/from/vehicles/[^ ]+'%s+io.vertx.core.http.ConnectionPoolTooBusyException: Connection pool reached max wait queue size of %d+") - then return drop end - - -- No idea what this msg should tell us. Has no details at all. - -- Seen 2022-08-30 prod - if log.file=="HttpClientRequestImpl" and log.level=="ERROR" - and log.msg:find("VertxException: Connection was closed", 0, true) - then return drop end - - return pass -end - - -function mod.debugPrintLogEntry( that, log ) - if that.printRaw then - print( log.raw ); - else - log:debugPrint() + -- Time range + local begDate, endDate = that.begDate, that.endDate + if begDate or endDate then + local date = normalizeIsoDateTime(log.date) + if begDate and date <= begDate then return drop end + if endDate and date > endDate then return drop end end + -- log level + local skipLevels = that.skipLevels + if skipLevels and skipLevels[log.level:upper()] then return drop end + -- dynamic filters + for iF = 1, #(that.filters) do + local filter = that.filters[iF] + if filter.matches(that, log) then + if filter.action == "drop" then return drop end + if filter.action == "keep" then return pass end + error("Unknown filter.action: \"".. 
filter.action .."\""); + end + end + return pass end -mod.main() +main() diff --git a/src/main/lua/paisa-logs/PaisaLogParser.lua b/src/main/lua/paisa-logs/PaisaLogParser.lua index 638e08c..8620e0c 100644 --- a/src/main/lua/paisa-logs/PaisaLogParser.lua +++ b/src/main/lua/paisa-logs/PaisaLogParser.lua @@ -386,6 +386,13 @@ function LogParse:getOrNewLogEntry() end +function exports.normalizeIsoDateTime( str ) + if str:find("%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d%.%d%d%d") then return str end + local y, mo, d, h, mi, s, ms = str:match("(%d%d%d)-(%d%d)-(%d%d)[ T_-](%d%d):(%d%d):(%d%d)[,.](%d%d%d)") + return y .."-".. mo .."-".. d .."T".. h ..":".. mi ..":".. s ..".".. ms +end + + LogEntry = { raw, date, -- cgit v1.1 From acf2ebad30b50cfb7666d8e9945892c0d7e4b737 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 19 Oct 2023 21:58:55 +0200 Subject: (LogDig) Update rules. Fix some bugs. --- src/main/lua/paisa-logs/DigHoustonLogs.lua | 329 ++++++++++++++++++++++------- src/main/lua/paisa-logs/PaisaLogParser.lua | 14 +- 2 files changed, 267 insertions(+), 76 deletions(-) diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index c587d71..d7da356 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -9,11 +9,12 @@ local PaisaLogParser = require("PaisaLogParser") local normalizeIsoDateTime = require("PaisaLogParser").normalizeIsoDateTime +local LOGDBG = function(msg)io.stderr:write(msg)end local main, onLogEntry, isWorthToPrint, loadFilters, initFilters -local function main() +function main() local that = { logPattern = "DATE STAGE SERVICE LEVEL FILE - MSG", -- Since 2021-09-24 on prod printRaw = true, @@ -30,112 +31,289 @@ local function main() end -local function loadFilters( that ) +function loadFilters( that ) assert(not that.filters) that.filters = { - { action = "drop", beforeDate = "2023-10-18 03:00:00.000" }, - { action = "drop", afterDate = "2023-10-18 15:00:00.000" }, + { action = "drop", beforeDate = "2023-10-18 03:00:00.000", }, + { action = "drop", afterDate = "2023-10-18 15:00:00.000", }, { action = "drop", level = "TRACE" }, { action = "drop", level = "DEBUG" }, { action = "drop", level = "INFO" }, + { action = "drop", level = "WARN" }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.NullPointerException: No null handler accepted", + stackPattern = "^" + .."\tat java.util.Objects.requireNonNull.Objects.java:246. ~..:..\n" + .."\tat io.vertx.core.impl.future.FutureImpl.onComplete.FutureImpl.java:132. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete.PromiseImpl.java:23. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + .."\tat io.vertx.core.file.impl.FileSystemImpl.delete.FileSystemImpl.java:290. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.deleteFile.FilePutter.java:218. ~.rest.storage.[0-9.]+.jar:..\n" + .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.lambda.cleanupFile.0.FilePutter.java:192. ~.rest.storage.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.impl.future.FutureImpl.3.onSuccess.FutureImpl.java:141. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + }, + + -- Seen: 2023-10-18 prod + -- TODO open PR to add some logging so we have a chance to find submarine. 
+ { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.IllegalStateException: Response head already sent", + stackPattern = "^" + .."\tat io.vertx.core.http.impl.Http1xServerResponse.checkHeadWritten.Http1xServerResponse.java:684. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + .."\tat io.vertx.core.http.impl.Http1xServerResponse.setStatusCode.Http1xServerResponse.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda.getAsyncHttpClientResponseHandler.7.Forwarder.java:430. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.impl.future.FutureImpl.3.onFailure.FutureImpl.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", + }, + + -- Reported: SDCISA-13717 + -- Seen: 2023-10-18 prod + { action = "drop", file = "LocalHttpServerResponse", level = "ERROR", + msgPattern = "^non%-proper HttpServerResponse occured\r?\n" + .."java.lang.IllegalStateException:" + .." You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using" + .." HTTP chunked encoding.", }, + + -- Reported: + -- Seen: 2023-10-18 prod + { action = "drop", file = "ContextImpl", level = "ERROR", + msgPattern = "Unhandled exception\n" + .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending" + .." any data if you are not using HTTP chunked encoding.", }, + + -- Seen: 2023-10-18 + -- Opened nsync PR 49 as a first counter measure. + { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.NullPointerException: null", + stackStartsWith = "\tat org.swisspush.nsync.multiget.MultiGetServer.lambda$tryLaunchOneRequest$2(MultiGetServer.java:107) ~[nsync-0.6.0.jar:?]" }, + + + -- Bunch of nonsense !ERROR!s which happen all the time as eddies go offline. + + -- Seen: 2023-10-18 + -- Happens all the time as gateleens error reporting is broken-by-desing. + { action = "drop", file = "Forwarder", level = "WARN", + msgPattern = "^..... ................................ Problem to request /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" + .."%-affiliated%-planning%-area%-[0-9]+%-vehicles: io.netty.channel.ConnectTimeoutException: connection timed out:" + .." eddie[0-9]+.pnet.ch/[0-9]+:7012", }, + -- Seen: 2023-10-18 + -- Nearly same as above but on ERROR level instead. + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles" + .." The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/" + .."trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded" + .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." 
http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, + -- Seen: 2023-10-18 prod + -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. + { action = "drop", file = "FilePutter", level = "ERROR", + msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, + -- Seen: 2023-10-18 prod + -- There are a whole bunch of related errors behind this filter which AFAICT all relate to shaky eddie connections. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+" + .." The timeout period of 30000ms has been exceeded while executing [DEGLOPSTU]+ /from.houston/%d+/eagle/[^ ]+ for server eddie%d+:7012$", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, + + -- Seen: 2023-10-18 prod + -- Reported: TODO link existing issue here + { action = "drop", file = "HttpHeaderUtil", level = "ERROR", + msgPattern = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/.*", }, + + -- Seen: 2023-10-18 prod + -- Reported: + { action = "drop", file = "Utils", level = "ERROR", + msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Sync failed.\n" + .."{\n" + ..' "countIndexQueries" : 1,\n' + ..' "countSentBytes" : 119,\n' + ..' "countReceivedBytes" : 0,\n' + ..' "countMultiGetRequests" : 0,\n' + ..' "countPuts" : 0,\n' + ..' "countDeletes" : 0,\n' + ..' "durationSeconds" : 0.0,\n' + ..' "iterationDepth" : 0\n' + .."}", }, + + -- Seen: 2023-10-18 prod + -- Reported: + { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", + stackPattern = "^" + .."\tat org.swisspush.gateleen.core.http.LocalHttpClientRequest.connection.LocalHttpClientRequest.java:754. ~.gateleen.core.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.1.lambda.handle.0.Forwarder.java:362. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", + }, + + -- Seen: 2023-10-18 prod + -- Reported: + { action = "drop", file = "Utils", level = "ERROR", + msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address:" + .." __vertx.reply.[0-9]+, repliedAddress: nsync.reregister.sync/slarti.vehicle.setup.sync.[0-9]+", + }, + + -- Seen: 2023-10-18 prod + -- Reported: + { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\n" + .."io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. 
address: __vertx.reply.[0-9]+, repliedAddress: nsync.register.sync" }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", + msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" + .." The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" + .." for server bob:8080", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", + stackStartsWith = "" + .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" + .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" + .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" + .."\tat io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", + }, + + -- Seen: 2023-10-18 prod + -- TODO Push issue to my backlog to fix this. + { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", + stackPattern = "^" + .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. 
~.gateleen.logging.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" + .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", + }, + + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^..... ................................ http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", + }, + + -- WELL_KNOWN: I guess happens when vehicle loses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", }, + -- WELL_KNOWN: I guess happens when vehicle loses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", }, + -- WELL_KNOWN: I guess happens when vehicle loses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/query.index The timeout period of 30000ms has been exceeded while executing" + .." POST /from.houston/%d+/eagle/nsync/v1/query-index for server eddie%d+:7012$", }, + -- WELL_KNOWN: I guess happens when vehicle loses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/timetable/notification/v1/planningareas/%d+/notifications/%x+ Connection was closed$", }, + -- WELL_KNOWN: I guess happens when vehicle loses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, + + ---- TODO Thought timeout? Can happen. But how often is ok? 
+ ---- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) + ---- Seen 2022-06-20, 2022-08-30 prod + --{ action = "drop", file = "Forwarder", level = "ERROR", + -- msgPattern = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, ---- [SDCISA-9572] pag --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, + -- msgPattern = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, + -- msgPattern = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, + -- msgPattern = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, ---- TODO Analyze ---- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) ---- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? ---- Seen: 2021-09-17 --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" + -- msgPattern = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" -- .." state. Closing server connection for stability reason", }, - ---- TODO link or create issue - ---- HINT: Occurred 774 times within 6 hrs (~2x/min) (2021-09-17_12:00 to 2021-09-17_18:00) - ---- Seen: 2022-06-20 prod - --{ action = "drop", file = "Utils", level = "ERROR", - -- msgPat = "Exception occurred\n%(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address:" - -- .." __vertx.reply.+, repliedAddress: nsync%-register%-sync", }, - ---- TODO Analyze ---- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) --{ action = "drop", file = "Forwarder", level = "WARN", - -- msgPat = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, + -- msgPattern = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, + -- msgPattern = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, ---- TODO Analyze ---- Seen: 2021-09-17, ..., 2022-06-20 --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, + -- msgPattern = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, ---- Seen 2021-10-25, 2022-08-30 prod --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" + -- msgPattern = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" -- .." state. Closing server connection for stability reason", }, ---- TODO Analyze. 
---- Seen 2021-09-17, 2022-06-20 --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPat = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + -- msgPattern = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, ---- Seen 2022-06-20 prod --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPat = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + -- msgPattern = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, ---- TODO rm filter when fixed ---- Reported: SDCISA-9573 ---- Seen: 2022-08-30 prod, 2022-06-20, 2021-09-17 --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPat = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, - - ---- TODO Thought timeout? Can happen. But how often is ok? - ---- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) - ---- Seen 2022-06-20, 2022-08-30 prod - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, + -- msgPattern = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, ---- Reported: SDCISA-9574 ---- TODO rm when resolved ---- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod, --{ action = "drop", file = "Utils", level = "ERROR", - -- msgPat = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, + -- msgPattern = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, ---- TODO Analyze --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, + -- msgPattern = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, ---- TODO Analyze. ---- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 --{ action = "drop", file = "RedisQues", level = "WARN", - -- msgPat = "Registration for queue .+ has changed to null", }, + -- msgPattern = "Registration for queue .+ has changed to null", }, ---- Reported: SDCISA-10973 ---- Seen: 2023-10-18 prod. --{ action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - -- msgPat = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" + -- msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" -- .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, - ---- Reported: TODO - ---- Seen: 2023-10-18 prod. - --{ action = "drop", file = "Utils", level = "ERROR", - -- msgPat = "Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for" - -- .." a reply. 
address: __vertx.reply.[0-9]+, repliedAddress: nsync%-re", }, - - ---- Seen: 2023-10-18 prod - --{ action = "drop", file = "HttpHeaderUtil", level = "ERROR", - -- msgPat = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/internal/security/login_state", }, - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" + -- msgPattern = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" -- .."%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been" -- .." exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" -- .."%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012", }, @@ -144,46 +322,64 @@ local function loadFilters( that ) ---- TODO rm when fixed ---- Seen 2022-08-30 prod, 2022-06-20 prod --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" -- .." Connection reset by peer", }, --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" -- .." Connection was closed", }, --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" -- .." Response already written. Not sure about the state. 
Closing server connection for stability reason", }, ---- TODO analyze ---- Seen 2022-06-20 prod --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPat = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, + -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, } end -local function initFilters( that ) +function initFilters( that ) for iF = 1, #(that.filters) do local descr = that.filters[iF] - local beforeDate, afterDate = descr.beforeDate, descr.afterDate - local file, level, msgPat = descr.file, descr.level, descr.msgPat + local beforeDate = descr.beforeDate and normalizeIsoDateTime(descr.beforeDate) + local afterDate = descr.afterDate and normalizeIsoDateTime(descr.afterDate) + local file, level, msgPattern, msgEquals = descr.file, descr.level, descr.msgPattern, descr.msgEquals + local rawPattern, stackPattern = descr.rawPattern, descr.stackPattern + local stackStartsWith = descr.stackStartsWith local filter = { action = descr.action, matches = false, } - filter.matches = function( that, log ) - if file and file ~= log.file then return false end - if level and level ~= log.level then return false end - local logDate = normalizeIsoDateTime(log.date) - if logDate < beforeDate then return false end - if logDate >= afterDate then return false end - if msgPat and not log.msg:find(msgPat) then return false end - return true + local hasAnyCondition = (beforeDate or afterDate or file or level or msgEquals or msgPattern or rawPattern or stackPattern or stackStartsWith); + if not hasAnyCondition then + filter.matches = function( that, log ) --[[LOGDBG("match unconditionally\n")]] return true end + else + filter.matches = function( that, log ) + local match, mismatch = true, false + if not log.date then log:debugPrint() end + if level and level ~= log.level then --[[LOGDBG("level mismatch: \"".. level .."\" != \"".. log.level .."\"\n")]] return mismatch end + if file and file ~= log.file then --[[LOGDBG("file mismatch: \"".. file .."\" != \"".. log.file .."\"\n")]] return mismatch end + local logDate = normalizeIsoDateTime(log.date) + local isBeforeDate = (not beforeDate or logDate < beforeDate); + local isAfterDate = (not afterDate or logDate >= afterDate); + if not isBeforeDate then --[[LOGDBG("not before: \"".. tostring(beforeDate) .."\", \"".. logDate .."\"\n")]] return mismatch end + if not isAfterDate then --[[LOGDBG("not after: \"".. tostring(afterDate) .."\", \"".. 
logDate .."\"\n")]] return mismatch end + if msgEquals and log.msg ~= msgEquals then return mismatch end + if stackStartsWith and log.stack and log.stack:sub(1, #stackStartsWith) ~= stackStartsWith then return mismatch end + if msgPattern and not log.msg:find(msgPattern) then --[[LOGDBG("match: msgPattern\n")]] return mismatch end + if stackPattern and log.stack and not log.stack:find(stackPattern) then return mismatch end + if rawPattern and not log.raw:find(rawPattern) then return mismatch end + --LOGDBG("DEFAULT match\n") + return match + end end that.filters[iF] = filter end end -local function onLogEntry( log, that ) - if isWorthToPrint(that, log) then +function onLogEntry( log, that ) + local isWorthIt = isWorthToPrint(that, log) + if isWorthIt then if that.printRaw then print(log.raw) else @@ -193,19 +389,8 @@ local function onLogEntry( log, that ) end -local function isWorthToPrint( that, log ) +function isWorthToPrint( that, log ) local pass, drop = true, false - -- Time range - local begDate, endDate = that.begDate, that.endDate - if begDate or endDate then - local date = normalizeIsoDateTime(log.date) - if begDate and date <= begDate then return drop end - if endDate and date > endDate then return drop end - end - -- log level - local skipLevels = that.skipLevels - if skipLevels and skipLevels[log.level:upper()] then return drop end - -- dynamic filters for iF = 1, #(that.filters) do local filter = that.filters[iF] if filter.matches(that, log) then diff --git a/src/main/lua/paisa-logs/PaisaLogParser.lua b/src/main/lua/paisa-logs/PaisaLogParser.lua index 8620e0c..f6ac0ce 100644 --- a/src/main/lua/paisa-logs/PaisaLogParser.lua +++ b/src/main/lua/paisa-logs/PaisaLogParser.lua @@ -357,8 +357,11 @@ end function LogParse:appendStacktraceLine() local log = self:getOrNewLogEntry() - log.stack = log.stack or ""; - log.stack = log.stack .."\n".. self.line; + if not log.stack then + log.stack = self.line + else + log.stack = log.stack .."\n".. self.line + end -- Also append to raw to have the complete entry there. log.raw = log.raw .."\n".. self.line; end @@ -376,7 +379,10 @@ function LogParse:publishLogEntry() error(msg); return end self.log = nil; -- Mark as consumed - self.cb_onLogEntry( log, self.cb_cls ) + -- Make sure log lines do NOT end in 0x0D + local msg = log.msg + if msg:byte(msg:len()) == 0x0D then log.msg = msg:sub(1, -2) end + self.cb_onLogEntry(log, self.cb_cls) end @@ -388,7 +394,7 @@ end function exports.normalizeIsoDateTime( str ) if str:find("%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d%.%d%d%d") then return str end - local y, mo, d, h, mi, s, ms = str:match("(%d%d%d)-(%d%d)-(%d%d)[ T_-](%d%d):(%d%d):(%d%d)[,.](%d%d%d)") + local y, mo, d, h, mi, s, ms = str:match("^(%d%d%d%d)-(%d%d)-(%d%d)[ T_-](%d%d):(%d%d):(%d%d)[,.](%d%d%d)$") return y .."-".. mo .."-".. d .."T".. h ..":".. mi ..":".. s ..".".. 
ms end -- cgit v1.1 From e29169dad507e73591e5f3ca9cbaeb57ea90df99 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 24 Oct 2023 10:46:52 +0200 Subject: (patch) Add eagle patches --- .../patch/eagle/default-bak20211124-080400.patch | 103 +++++++++++++++++++++ .../patch/eagle/default-bak20230220-121000.patch | 102 ++++++++++++++++++++ .../patch/eagle/default-bak20231024-082300.patch | 101 ++++++++++++++++++++ src/main/patch/eagle/default.patch | 101 ++++++++++++++++++++ src/main/patch/eagle/simplelogger.patch | 33 +++++++ 5 files changed, 440 insertions(+) create mode 100644 src/main/patch/eagle/default-bak20211124-080400.patch create mode 100644 src/main/patch/eagle/default-bak20230220-121000.patch create mode 100644 src/main/patch/eagle/default-bak20231024-082300.patch create mode 100644 src/main/patch/eagle/default.patch create mode 100644 src/main/patch/eagle/simplelogger.patch diff --git a/src/main/patch/eagle/default-bak20211124-080400.patch b/src/main/patch/eagle/default-bak20211124-080400.patch new file mode 100644 index 0000000..c7d3a8c --- /dev/null +++ b/src/main/patch/eagle/default-bak20211124-080400.patch @@ -0,0 +1,103 @@ + + General patch to fix crappy design. + + Contains: + - Logging override to get back control over logging. + - Listen on localhost only. + - Disable NSync. To suppress that useless noise. + + +diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml +index 5b226670..45acc276 100644 +--- a/eagle-process/pom.xml ++++ b/eagle-process/pom.xml +@@ -21,7 +21,27 @@ + true + + +- ++ ++ org.slf4j ++ slf4j-api ++ 1.7.25 ++ ++ ++ org.slf4j ++ slf4j-simple ++ 1.7.25 ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ 1.7.25 ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ 1.7.25 ++ + + ch.post.it.paisa.eagle + eagle-domain +diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +index 13ebdc51..9e947a2d 100644 +--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java ++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle { + doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode); + ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/"); + +- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); ++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); + + ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT); + +@@ -377,9 +377,9 @@ public class MainVerticle extends AbstractVerticle { + if (classpathResourceHandler.handle(request)) { + return; + } +- if (nSyncHandler.handle(request)) { +- return; +- } ++ //if (nSyncHandler.handle(request)) { ++ // return; ++ //} + + // Attention to the order of handlers - handlers from here can fire selfrequests + if ((env.equals("dev") || env.equals("test")) && validationHandler.isToValidate(request)) { +@@ -440,7 +440,7 @@ public class MainVerticle extends AbstractVerticle { + vertxRouter.route().handler(routingContextHandler); + + mainServer.requestHandler(vertxRouter::accept); +- mainServer.listen(mainPort, x -> { ++ mainServer.listen(mainPort, "127.0.0.1", x -> { + propertyHandler.addRefreshable(schedulerResourceManager); + log.info(" I am ready"); + String bootLogFilename = 
props.getOrDefault("service.boot.log", "/data/init/boot.log").toString(); +@@ -530,7 +530,7 @@ public class MainVerticle extends AbstractVerticle { + final Future storageDataFuture = newLoggingFuture.apply("storage-data" ); + final Future mirrorModFuture = newLoggingFuture.apply("MirrorMod" ); + final Future metricsModuleFuture = newLoggingFuture.apply("MetricsModule" ); +- final Future nsyncFuture = newLoggingFuture.apply("NSync" ); ++ //final Future nsyncFuture = newLoggingFuture.apply("NSync" ); + CompositeFuture.all(futuresToWaitFor).setHandler(handler); + + +@@ -669,11 +669,11 @@ public class MainVerticle extends AbstractVerticle { + //////////////////////////////////// NSync ///////////////////////////////////////////////////////////////////////// + ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + { +- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() +- .withBasePath(EAGLE_NSYNC_PATH) +- .withMainPort(mainPort); +- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); +- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer()); ++// NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() ++// .withBasePath(EAGLE_NSYNC_PATH) ++// .withMainPort(mainPort); ++// DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); ++// vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer()); + } + + vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props))); diff --git a/src/main/patch/eagle/default-bak20230220-121000.patch b/src/main/patch/eagle/default-bak20230220-121000.patch new file mode 100644 index 0000000..76cedd0 --- /dev/null +++ b/src/main/patch/eagle/default-bak20230220-121000.patch @@ -0,0 +1,102 @@ + + General patch to fix crappy desing. + + Contains: + - Logging override to get back control over logging. + - Listen on localhost only (DISABLED for zarniwoop) + - Disable NSync. To suppress that useless noise. 
+ + +diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml +index 5b226670..45acc276 100644 +--- a/eagle-process/pom.xml ++++ b/eagle-process/pom.xml +@@ -21,6 +21,28 @@ + true + + ++ ++ ++ org.slf4j ++ slf4j-api ++ 1.7.25 ++ ++ ++ org.slf4j ++ slf4j-simple ++ 1.7.25 ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ 1.7.25 ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ 1.7.25 ++ ++ + + + ch.post.it.paisa.eagle +diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +index 13ebdc51..9e947a2d 100644 +--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java ++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle { + doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode); + ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/"); + +- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); ++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); + + ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT); + +@@ -325,7 +325,7 @@ public class MainVerticle extends AbstractVerticle { + .withLoggingResourceManager(loggingResourceManager) + .withMonitoringHandler(monitoringHandler) + .withHttpClientFactory(this::createHttpClientForRouter) +- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler)) ++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null)) + .build(); + }); + }); +@@ -412,9 +412,9 @@ public class MainVerticle extends AbstractVerticle { + if (hookHandler.handle(request)) { + return; + } +- if (nSyncHandler.handle(request)) { +- return; +- } ++ //if (nSyncHandler.handle(request)) { ++ // return; ++ //} + if (eventBusHandler.handle(request)) { + return; + } +@@ -559,7 +559,7 @@ public class MainVerticle extends AbstractVerticle { + final Future storageDataFuture = newLoggingFuture.apply("storage-data" ); + final Future mirrorModFuture = newLoggingFuture.apply("MirrorMod" ); + final Future metricsModuleFuture = newLoggingFuture.apply("MetricsModule" ); +- final Future nsyncFuture = newLoggingFuture.apply("NSync" ); ++ //final Future nsyncFuture = newLoggingFuture.apply("NSync" ); + CompositeFuture.all(futuresToWaitFor).setHandler(handler); + + +@@ -721,10 +721,10 @@ public class MainVerticle extends AbstractVerticle { + ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + { +- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() +- .withBasePath(EAGLE_NSYNC_PATH) +- .withMainPort(mainPort); +- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); +- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer()); ++ //NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() ++ // .withBasePath(EAGLE_NSYNC_PATH) ++ // .withMainPort(mainPort); ++ //DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); ++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer()); + } + + vertx.deployVerticle(new DirectMemoryUsageLogger(), 
new DeploymentOptions().setConfig(new JsonObject(props))); diff --git a/src/main/patch/eagle/default-bak20231024-082300.patch b/src/main/patch/eagle/default-bak20231024-082300.patch new file mode 100644 index 0000000..5578433 --- /dev/null +++ b/src/main/patch/eagle/default-bak20231024-082300.patch @@ -0,0 +1,101 @@ + + General patch to fix crappy desing. + + Contains: + - Logging override to get back control over logging. + - Listen on localhost only (DISABLED for zarniwoop) + - Disable NSync. To suppress that useless noise. + + Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near + "eagle-02.01.10.00". + + +diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml +index 5b226670..45acc276 100644 +--- a/eagle-process/pom.xml ++++ b/eagle-process/pom.xml +@@ -23,6 +23,23 @@ + + + ++ ++ ++ org.slf4j ++ slf4j-simple ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ ${slf4j.version} ++ ++ + + + ch.post.it.paisa.eagle +diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +index 13ebdc51..9e947a2d 100644 +--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java ++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +@@ -323,7 +323,7 @@ public class MainVerticle extends AbstractVerticle { + doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode); + ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/"); + +- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); ++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); /*TODO revert*/ + + ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT); + +@@ -387,7 +387,7 @@ public class MainVerticle extends AbstractVerticle { + .withLoggingResourceManager(loggingResourceManager) + .withMonitoringHandler(monitoringHandler) + .withHttpClientFactory(this::createHttpClientForRouter) +- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler, authorizer)) ++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null, authorizer)) + .build(); + }); + }); +@@ -476,9 +476,9 @@ public class MainVerticle extends AbstractVerticle { + if (hookHandler.handle(request)) { + return; + } +- if (nSyncHandler.handle(request)) { +- return; +- } ++ //if (nSyncHandler.handle(request)) { ++ // return; ++ //} + if (eventBusHandler.handle(request)) { + return; + } +@@ -624,7 +624,7 @@ public class MainVerticle extends AbstractVerticle { + final Promise storageLogPromise = newLoggingFuture.apply("storage-log" ); + final Promise storageDataPromise = newLoggingFuture.apply("storage-data" ); + final Promise metricsModulePromise = newLoggingFuture.apply("MetricsModule" ); +- final Promise nsyncPromise = newLoggingFuture.apply("NSync" ); ++ //final Promise nsyncPromise = newLoggingFuture.apply("NSync" ); /*TODO revert*/ + CompositeFuture.all(futuresToWaitFor).onComplete(handler); + + +@@ -776,10 +776,10 @@ public class MainVerticle extends AbstractVerticle { + //////////////////////////////////// NSync ///////////////////////////////////////////////////////////////////////// + 
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + { +- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() +- .withBasePath(EAGLE_NSYNC_PATH) +- .withMainPort(mainPort); +- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); +- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); ++ // TODO NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig() ++ // TODO .withBasePath(EAGLE_NSYNC_PATH) ++ // TODO .withMainPort(mainPort); ++ // TODO DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); ++ // TODO vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); + } + + vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props))); diff --git a/src/main/patch/eagle/default.patch b/src/main/patch/eagle/default.patch new file mode 100644 index 0000000..c0b6785 --- /dev/null +++ b/src/main/patch/eagle/default.patch @@ -0,0 +1,101 @@ + + General patch to fix crappy desing. + + Contains: + - Logging override to get back control over logging. + - Listen on localhost only (DISABLED for zarniwoop) + - Disable NSync. To suppress that useless noise. + + Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near + "eagle-02.01.10.00". + + +diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml +index 5b226670..45acc276 100644 +--- a/eagle-process/pom.xml ++++ b/eagle-process/pom.xml +@@ -23,6 +23,23 @@ + + + ++ ++ ++ org.slf4j ++ slf4j-simple ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ ${slf4j.version} ++ ++ + + + ch.post.it.paisa.eagle +diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +index 13ebdc51..9e947a2d 100644 +--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java ++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java +@@ -367,7 +367,7 @@ public class MainVerticle extends AbstractVerticle { + ClasspathResourceHandler classpathResourceHandler = + new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/"); + +- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); ++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); + + ReturnHttpErrorHandler returnHttpErrorHandler = + new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT); +@@ -470,7 +470,7 @@ public class MainVerticle extends AbstractVerticle { + selfClient, + classpathResourceHandler, + returnHttpErrorHandler, +- nSyncHandler, ++ null, + authorizer)) + .build(); + }); +@@ -607,9 +607,9 @@ public class MainVerticle extends AbstractVerticle { + if (hookHandler.handle(request)) { + return; + } +- if (nSyncHandler.handle(request)) { +- return; +- } ++ //if (nSyncHandler.handle(request)) { ++ // return; ++ //} + if (eventBusHandler.handle(request)) { + return; + } +@@ -777,7 +777,7 @@ public class MainVerticle extends AbstractVerticle { + final Promise storageLogPromise = newLoggingFuture.apply("storage-log"); + final Promise storageDataPromise = newLoggingFuture.apply("storage-data"); + final Promise metricsModulePromise = newLoggingFuture.apply("MetricsModule"); +- final Promise nsyncPromise = newLoggingFuture.apply("NSync"); ++ 
//final Promise nsyncPromise = newLoggingFuture.apply("NSync"); /*TODO revert*/ + CompositeFuture.all(futuresToWaitFor).onComplete(handler); + + String redisHost = (String) props.get("redis.host"); +@@ -979,10 +979,10 @@ public class MainVerticle extends AbstractVerticle { + //////////////////////////////////// NSync + /////////////////////////////////////////////////////////////////////////////////////////////////////// + { +- NSyncVerticleConfig nSyncVerticleConfig = +- new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort); +- DeploymentOptions deplOpt = +- new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); +- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); ++ //NSyncVerticleConfig nSyncVerticleConfig = /*TODO revert*/ ++ // new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort); /*TODO revert*/ ++ //DeploymentOptions deplOpt = /*TODO revert*/ ++ // new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); /*TODO revert*/ ++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); /*TODO revert*/ + } + } + diff --git a/src/main/patch/eagle/simplelogger.patch b/src/main/patch/eagle/simplelogger.patch new file mode 100644 index 0000000..97cd8de --- /dev/null +++ b/src/main/patch/eagle/simplelogger.patch @@ -0,0 +1,33 @@ +diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml +index 5b226670..45acc276 100644 +--- a/eagle-process/pom.xml ++++ b/eagle-process/pom.xml +@@ -21,7 +21,27 @@ + true + + +- ++ ++ org.slf4j ++ slf4j-api ++ 1.7.25 ++ ++ ++ org.slf4j ++ slf4j-simple ++ 1.7.25 ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ 1.7.25 ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ 1.7.25 ++ ++ + + ch.post.it.paisa.eagle + eagle-domain -- cgit v1.1 From 91c875e8f9292a0bab79ccf75249771e8b681d21 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 26 Oct 2023 01:39:17 +0200 Subject: (pcap) Add makeStats.lua script (aka pcapit) --- src/main/lua/pcap/makeStats.lua | 106 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 src/main/lua/pcap/makeStats.lua diff --git a/src/main/lua/pcap/makeStats.lua b/src/main/lua/pcap/makeStats.lua new file mode 100644 index 0000000..2874d9f --- /dev/null +++ b/src/main/lua/pcap/makeStats.lua @@ -0,0 +1,106 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) +local newPcapDumper = assert(require("pcapit").newPcapDumper) + +local main, onPcapFrame, printStats + + +function main() + local app = { + dumpr = false, + parser = false, + foundPortNumbers = {}, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + } + --app.dumpr = newPcapDumper{ + -- dumpFilePath = "/tmp/meins/my.out.pcap", + --} + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printStats(app) +end + + +function onPcapFrame( app, it ) + local out = io.stdout + local sec, usec = it:frameArrivalTime() + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + --local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr() + --local isTcp = (it:tcpSeqNr() ~= nil) + -- + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + if not app.foundPortNumbers[srcPort] then app.foundPortNumbers[srcPort] = 1 + else app.foundPortNumbers[srcPort] = app.foundPortNumbers[srcPort] + 1 end + if not app.foundPortNumbers[dstPort+100000] then 
app.foundPortNumbers[dstPort+100000] = 1 + else app.foundPortNumbers[dstPort+100000] = app.foundPortNumbers[dstPort+100000] + 1 end + -- + local portOfInterest = 7012 + if dstPort == portOfInterest then + local httpMethod, httpUri = + it:trspPayload():match("^([A-Z]+) ([^ ]+) [^ \r\n]+\r?\n") + if httpMethod then + out:write(string.format("%5d->%5d %s %s\n", srcPort, dstPort, httpMethod, httpUri)) + end + elseif srcPort == portOfInterest then + local httpStatus, httpPhrase = + it:trspPayload():match("^HTTP/%d.%d (%d%d%d) ([^\r\n]*)\r?\n") + if httpStatus then + out:write(string.format("%5d<-%5d %s %s\n", srcPort, dstPort, httpStatus, httpPhrase)) + end + end + --if srcPort ~= 53 and dstPort ~= 53 then return end + if app.dumpr then it:dumpTo(app.dumpr) end +end + + +function printStats( app ) + local out = io.stdout + local sorted = {} + local maxOccurValue = 0 + for port, pkgcnt in pairs(app.foundPortNumbers) do + if pkgcnt > maxOccurValue then maxOccurValue = pkgcnt end + table.insert(sorted, { port=port, pkgcnt=pkgcnt }) + end + table.sort(sorted, function(a, b)return a.pkgcnt > b.pkgcnt end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write("Statistics\n") + out:write("From: ")out:write(os.date(timeFmt,app.oldestEpochSec))out:write("\n") + out:write("To: ")out:write(os.date(timeFmt,app.youngestEpochSec))out:write("\n") + out:write("\n") + out:write(" .- Port (TCP/UDP)\n") + out:write(" | .-Direction (Send, Receive)\n") + out:write(" | | .- Frames per second\n") + out:write(".-+-. | .---+-. Amount of frames compared:\n") + local chartWidth = 60 + local cntPrinted = 0 + for i, elem in ipairs(sorted) do + local port, pkgcnt = elem.port, elem.pkgcnt + local dir = (port > 100000)and("R")or("S") + if port > 100000 then port = port - 100000 end + if port > 30000 then goto nextPort end + local pkgsPerSec = math.floor((pkgcnt / dumpDurationSec)*10+.5)/10 + out:write(string.format("%5d %s %7.1f |", port, dir, pkgsPerSec)) + local barLen = pkgcnt / maxOccurValue + --local barLen = (math.log(pkgcnt) / math.log(maxOccurValue)) + for i=1, chartWidth-1 do + out:write((i < (barLen*chartWidth))and("=")or(" ")) + end + out:write("|\n") + cntPrinted = cntPrinted + 1 + if cntPrinted >= 20 then break end + ::nextPort:: + end + out:write("\n") +end + + +main() + -- cgit v1.1 From d973f87420fc42960892262705c8f59e903dda05 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 27 Oct 2023 16:01:22 +0200 Subject: Dump changes --- doc/note/links/links.txt | 4 ++-- doc/note/tcpdump/tcpdump.txt | 5 +++++ src/main/eagle | 1 + src/main/lua/paisa-logs/DigHoustonLogs.lua | 2 +- src/main/patch/slarti/default.patch | 31 ++++++++++++++++++++++++++++++ 5 files changed, 40 insertions(+), 3 deletions(-) create mode 120000 src/main/eagle create mode 100644 src/main/patch/slarti/default.patch diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index bf28fb1..f565a26 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -155,6 +155,8 @@ Links (Aka argument amplifiers) ## Code Style format auto-formatters - [warning about formatters](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/79/overview?commentId=235667) +- [Linter produces crap](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/567/overview?commentId=237627) +- [Linter produces crap js if](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/598/overview?commentId=252867) - [static final java 
uppercase](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/82/overview?commentId=39126) - [invalid java class name](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/82/overview?commentId=39125) - [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/poodoo/pull-requests/40/overview?commentId=263122) @@ -163,8 +165,6 @@ Links (Aka argument amplifiers) - [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/79) - [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/houston/pull-requests/449/overview?commentId=263593) - [boolean expression formatting](https://gitit.post.ch/projects/ISA/repos/houston/pull-requests/461/overview?commentId=284022) -- [Linter produces crap](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/567/overview?commentId=237627) -- [Linter produces crap js if](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/598/overview?commentId=252867) - [Suddenly NEW formatting rules in PaISA since 2021](https://gitit.post.ch/projects/ISA/repos/watson/pull-requests/1/overview?commentId=234597) - "https://gitit.post.ch/projects/ISA/repos/zarniwoop/pull-requests/20/overview?commentId=85912" - "https://gitit.post.ch/projects/ISA/repos/zarniwoop/pull-requests/21/overview?commentId=87250" diff --git a/doc/note/tcpdump/tcpdump.txt b/doc/note/tcpdump/tcpdump.txt index 9c9feb6..d19c36b 100644 --- a/doc/note/tcpdump/tcpdump.txt +++ b/doc/note/tcpdump/tcpdump.txt @@ -31,6 +31,11 @@ Tips: "https://chrissanders.org/2018/06/large-captures4-filter-whittling/" +## Local dumps isa-only filter + + "portrange 6000-9999 and not port 7778" + + ## Accept HTTP POST requests: "tcp[((tcp[12:1] & 0xf0) >> 2):4] = 0x504F5354" diff --git a/src/main/eagle b/src/main/eagle new file mode 120000 index 0000000..f5160d6 --- /dev/null +++ b/src/main/eagle @@ -0,0 +1 @@ +C:/work/projects/isa-svc/eagle/.git/meins \ No newline at end of file diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index d7da356..c9b510b 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -1,7 +1,7 @@ #!/usr/bin/env lua --[====================================================================[ - projDir='C:\path\to\proj\root' + projDir='/c/path/to/proj/root' export LUA_PATH="${projDir:?}/src/main/lua/paisa-logs/?.lua" lua -W "${projDir:?}/src/main/lua/paisa-logs/DigHoustonLogs.lua" diff --git a/src/main/patch/slarti/default.patch b/src/main/patch/slarti/default.patch new file mode 100644 index 0000000..2d910d7 --- /dev/null +++ b/src/main/patch/slarti/default.patch @@ -0,0 +1,31 @@ +diff --git a/slarti-web/pom.xml b/slarti-web/pom.xml +index 7933bdf86..3a1730377 100644 +--- a/slarti-web/pom.xml ++++ b/slarti-web/pom.xml +@@ -17,6 +17,26 @@ + war + + ++ ++ org.slf4j ++ slf4j-api ++ 2.0.1 ++ ++ ++ org.slf4j ++ slf4j-simple ++ 2.0.1 ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ 2.0.1 ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ 2.0.1 ++ + + ch.post.it.webjars + linti -- cgit v1.1 From b0499b5ef664471b2da273133cd3d40d101740a1 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 28 Oct 2023 13:37:16 +0200 Subject: Add some notes. 
--- doc/note/gdb/gdb.txt | 10 +++++++ doc/note/qemu/build-libpcap.txt | 64 +++++++++++++++++++++++++++++++++++++++++ doc/note/qemu/qemu.txt | 8 ++++-- 3 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 doc/note/gdb/gdb.txt create mode 100644 doc/note/qemu/build-libpcap.txt diff --git a/doc/note/gdb/gdb.txt b/doc/note/gdb/gdb.txt new file mode 100644 index 0000000..20cbd4d --- /dev/null +++ b/doc/note/gdb/gdb.txt @@ -0,0 +1,10 @@ + +## Print next few ASM instructions + + x/3i $pc + + +## Sources + +- [Print asm instructions](https://stackoverflow.com/a/59331366/4415884) + diff --git a/doc/note/qemu/build-libpcap.txt b/doc/note/qemu/build-libpcap.txt new file mode 100644 index 0000000..b86dd1c --- /dev/null +++ b/doc/note/qemu/build-libpcap.txt @@ -0,0 +1,64 @@ + +### Debian native +true \ + && PKGS_TO_ADD="curl ca-certificates gcc make libc6-dev flex bison" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && HOST= \ + && true + + +### Alpine mingw cross +true \ + && PKGS_TO_ADD="binutils curl mingw-w64-gcc make tar flex bison" \ + && SUDO="/home/$USER/.local/bin/mysudo" \ + && PKGINIT=true \ + && PKGADD="$SUDO apk add" \ + && HOST=x86_64-w64-mingw32 \ + && true + + +## Generic +true \ + && PCAP_VERSION="1.10.4" \ + && CACHE_DIR="/var/tmp" \ + && WORKDIR="/tmp" \ + && INSTALL_ROOT="/usr/local" \ + && MAKE_JOBS=$(nproc) \ + && true + + +## Make +true \ + && PCAP_URL="https://github.com/the-tcpdump-group/libpcap/archive/refs/tags/libpcap-${PCAP_VERSION:?}.tar.gz" \ + && PCAP_SRCTGZ="${CACHE_DIR:?}/pcap-${PCAP_VERSION:?}.tgz" \ + && PCAP_BINTGZ="${PCAP_SRCTGZ%.*}-bin.tgz" \ + && if test -f "${PCAP_SRCTGZ:?}" ]; then true \ + && echo "[DEBUG] Already have \"${PCAP_SRCTGZ:?}\"" \ + ;else true \ + && echo curl -sSL "${PCAP_URL:?}" -o "${PCAP_SRCTGZ:?}" \ + && curl -sSL "${PCAP_URL:?}" -o "${PCAP_SRCTGZ:?}" \ + ;fi \ + && ( mkdir "${WORKDIR:?}/pcap-${PCAP_VERSION}" \ + && cd "${WORKDIR:?}/pcap-${PCAP_VERSION}" \ + && tar xf "${PCAP_SRCTGZ:?}" \ + && cd * \ + && ./configure --prefix=${WORKDIR:?}/pcap-"${PCAP_VERSION:?}"/out \ + && make clean \ + && make -j$(nproc) \ + && make install \ + && cd ../out \ + && rm bin/pcap-config lib/pkgconfig -rf \ + && tar cf "${PCAP_BINTGZ:?}" * \ + && printf '\n Build pcap Done\n\n' \ + ) \ + && true + + +### Install +true \ + && $SUDO mkdir -p "${INSTALL_ROOT:?}" \ + && $SUDO tar -C "${INSTALL_ROOT:?}" -xf "${PCAP_BINTGZ:?}" \ + && true + diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index fd85b1b..62cf11d 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -42,6 +42,9 @@ qemu-system-x86_64 \ -boot order=dc \ -cdrom "path/to/cd.iso" \ -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ + `# Host-Only Network` \ + -netdev user,id=n1,restrict=y \ + -device e1000,netdev=n1 \ `# 10.0.2.x network with host redirect` \ -netdev user,id=n0,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ -device e1000,netdev=n0 \ @@ -53,8 +56,9 @@ qemu-system-x86_64 \ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ `# Choose ONE of those for graphic output` \ -nographic \ - -display gtk \ - -display sdl \ + -display sdl,grab-mod=rctrl \ + -display gtk,show-menubar=on \ + -display vnc=127.0.0.1:0 `#HINT: 0 is port 5900` \ ; ### Example manual adapter setup (inside VM) for socket mcast network: -- cgit v1.1 From 9afda1c376a4a56fe3cffc07a425ae84cee3ebe7 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 28 Oct 2023 20:55:26 +0200 Subject: 
Update some quemu doc about networking. --- doc/note/qemu/qemu.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 62cf11d..214fb81 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -42,14 +42,14 @@ qemu-system-x86_64 \ -boot order=dc \ -cdrom "path/to/cd.iso" \ -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ - `# Host-Only Network` \ - -netdev user,id=n1,restrict=y \ + `# Isolated Network` \ + -netdev user,id=n1,ipv6=off,restrict=y \ -device e1000,netdev=n1 \ `# 10.0.2.x network with host redirect` \ - -netdev user,id=n0,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ + -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ -device e1000,netdev=n0 \ `# socket mcast shared network adapter` \ - -netdev socket,id=n1,mcast=230.0.0.1:1234 \ + -netdev socket,id=n1,ipv6=off,mcast=230.0.0.1:1234 \ -device e1000,netdev=n1 \ `# Fix broken host systems` \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ -- cgit v1.1 From 2d99307701ac69fdbfaa758acbaef4b0fb5ed0d8 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 29 Oct 2023 02:25:22 +0200 Subject: Tinker around to produce some stats out of pcap files. --- src/main/lua/pcap/httpStats.lua | 118 ++++++++++++++++++++++++++++++++++++ src/main/lua/pcap/makeStats.lua | 106 -------------------------------- src/main/lua/pcap/tcpPortStats.lua | 82 +++++++++++++++++++++++++ src/main/lua/pcap/xServiceStats.lua | 90 +++++++++++++++++++++++++++ 4 files changed, 290 insertions(+), 106 deletions(-) create mode 100644 src/main/lua/pcap/httpStats.lua delete mode 100644 src/main/lua/pcap/makeStats.lua create mode 100644 src/main/lua/pcap/tcpPortStats.lua create mode 100644 src/main/lua/pcap/xServiceStats.lua diff --git a/src/main/lua/pcap/httpStats.lua b/src/main/lua/pcap/httpStats.lua new file mode 100644 index 0000000..e4a3aaa --- /dev/null +++ b/src/main/lua/pcap/httpStats.lua @@ -0,0 +1,118 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) + +local main, onPcapFrame, vapourizeUrlVariables, printHttpRequestStats + + +function main() + local app = { + parser = false, + foundHttpRequests = {}, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printHttpRequestStats(app) +end + + +function onPcapFrame( app, it ) + local out = io.stdout + local sec, usec = it:frameArrivalTime() + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + -- + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + local portOfInterest = 7012 + if dstPort == portOfInterest then + local httpMethod, httpUri = + it:trspPayload():match("^([A-Z]+) ([^ ]+) [^ \r\n]+\r?\n") + if httpMethod then + --out:write(string.format("%5d->%5d %s %s\n", srcPort, dstPort, httpMethod, httpUri)) + httpUri = vapourizeUrlVariables(app, httpUri) + local key = httpUri -- httpMethod .." ".. 
httpUri + local obj = app.foundHttpRequests[key] + if not obj then + obj = { count=0, httpMethod=false, httpUri=false, } + app.foundHttpRequests[key] = obj + end + obj.count = obj.count + 1 + obj.httpMethod = httpMethod + obj.httpUri = httpUri + end + elseif srcPort == portOfInterest then + local httpStatus, httpPhrase = + it:trspPayload():match("^HTTP/%d.%d (%d%d%d) ([^\r\n]*)\r?\n") + if httpStatus then + --out:write(string.format("%5d<-%5d %s %s\n", srcPort, dstPort, httpStatus, httpPhrase)) + end + end +end + + +function vapourizeUrlVariables( app, uri ) + -- A very specific case + uri = uri:gsub("^(/houston/users/)%d+(/.*)$", "%1{}%2"); + if uri:find("^/houston/users/[^/]+/user/.*$") then return uri end + -- + -- Try to do some clever guesses to group URIs wich only differ in variable segments + uri = uri:gsub("(/|-)[%dI_-]+/", "%1{}/"):gsub("(/|-)[%dI-]+/", "%1{}/") -- two turns, to also get consecutive number segments + uri = uri:gsub("([/-])[%dI_-]+$", "%1{}") + uri = uri:gsub("/%d+(%.%w+)$", "/{}%1") + uri = uri:gsub("(/|-)[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+(/?)$", "%1{}%2") + uri = uri:gsub("/v%d/", "/v0/") -- Merge all API versions + -- + -- Generify remaining by trimming URIs from right + uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/).*$", "%1...") + uri = uri:gsub("^(/from%-houston/[^/]+/eagle/fis/information/).*$", "%1...") + uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/v%d/push/trillian%-phonebooks%-).*$", "%1...") + uri = uri:gsub("^(/from%-houston/[^/]+/eagle/timetable/wait/).*$", "%1...") + uri = uri:gsub("^(/houston/service%-instances/).*$", "%1...") + uri = uri:gsub("^(/vortex/stillInterested%?vehicleId%=).*$", "%1...") + uri = uri:gsub("^(/houston/[^/]+/[^/]+/).*$", "%1...") + return uri +end + + +function printHttpRequestStats( app ) + local out = io.stdout + local sorted = {} + local maxOccurValue = 0 + local overallCount = 0 + for _, reqObj in pairs(app.foundHttpRequests) do + if reqObj.count > maxOccurValue then maxOccurValue = reqObj.count end + overallCount = overallCount + reqObj.count + table.insert(sorted, reqObj) + end + table.sort(sorted, function(a, b)return a.count > b.count end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write(string.format(" Subject HTTP Request Statistics\n")) + out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) + out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) + out:write(string.format("Throughput %.1f HTTP requests per second\n", overallCount / dumpDurationSec)) + out:write("\n") + out:write(" .-- HTTP Requests per Second\n") + out:write(" | .-- URI\n") + out:write(".--+--. 
.-+---------\n") + local chartWidth = 60 + local cntPrinted = 0 + for i, elem in ipairs(sorted) do + local count, httpMethod, httpUri = elem.count, elem.httpMethod, elem.httpUri + local cntPerSec = math.floor((count / dumpDurationSec)*10+.5)/10 + out:write(string.format("%7.1f %s\n", cntPerSec, httpUri)) + cntPrinted = cntPrinted + 1 + ::nextPort:: + end + out:write("\n") +end + + +main() + diff --git a/src/main/lua/pcap/makeStats.lua b/src/main/lua/pcap/makeStats.lua deleted file mode 100644 index 2874d9f..0000000 --- a/src/main/lua/pcap/makeStats.lua +++ /dev/null @@ -1,106 +0,0 @@ - -local newPcapParser = assert(require("pcapit").newPcapParser) -local newPcapDumper = assert(require("pcapit").newPcapDumper) - -local main, onPcapFrame, printStats - - -function main() - local app = { - dumpr = false, - parser = false, - foundPortNumbers = {}, - youngestEpochSec = -math.huge, - oldestEpochSec = math.huge, - } - --app.dumpr = newPcapDumper{ - -- dumpFilePath = "/tmp/meins/my.out.pcap", - --} - app.parser = newPcapParser{ - dumpFilePath = "-", - onFrame = function(f)onPcapFrame(app, f)end, - } - app.parser:resume() - printStats(app) -end - - -function onPcapFrame( app, it ) - local out = io.stdout - local sec, usec = it:frameArrivalTime() - local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() - --local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr() - --local isTcp = (it:tcpSeqNr() ~= nil) - -- - if sec < app.oldestEpochSec then app.oldestEpochSec = sec end - if sec > app.youngestEpochSec then app.youngestEpochSec = sec end - -- - if not app.foundPortNumbers[srcPort] then app.foundPortNumbers[srcPort] = 1 - else app.foundPortNumbers[srcPort] = app.foundPortNumbers[srcPort] + 1 end - if not app.foundPortNumbers[dstPort+100000] then app.foundPortNumbers[dstPort+100000] = 1 - else app.foundPortNumbers[dstPort+100000] = app.foundPortNumbers[dstPort+100000] + 1 end - -- - local portOfInterest = 7012 - if dstPort == portOfInterest then - local httpMethod, httpUri = - it:trspPayload():match("^([A-Z]+) ([^ ]+) [^ \r\n]+\r?\n") - if httpMethod then - out:write(string.format("%5d->%5d %s %s\n", srcPort, dstPort, httpMethod, httpUri)) - end - elseif srcPort == portOfInterest then - local httpStatus, httpPhrase = - it:trspPayload():match("^HTTP/%d.%d (%d%d%d) ([^\r\n]*)\r?\n") - if httpStatus then - out:write(string.format("%5d<-%5d %s %s\n", srcPort, dstPort, httpStatus, httpPhrase)) - end - end - --if srcPort ~= 53 and dstPort ~= 53 then return end - if app.dumpr then it:dumpTo(app.dumpr) end -end - - -function printStats( app ) - local out = io.stdout - local sorted = {} - local maxOccurValue = 0 - for port, pkgcnt in pairs(app.foundPortNumbers) do - if pkgcnt > maxOccurValue then maxOccurValue = pkgcnt end - table.insert(sorted, { port=port, pkgcnt=pkgcnt }) - end - table.sort(sorted, function(a, b)return a.pkgcnt > b.pkgcnt end) - local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec - local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" - out:write("\n") - out:write("Statistics\n") - out:write("From: ")out:write(os.date(timeFmt,app.oldestEpochSec))out:write("\n") - out:write("To: ")out:write(os.date(timeFmt,app.youngestEpochSec))out:write("\n") - out:write("\n") - out:write(" .- Port (TCP/UDP)\n") - out:write(" | .-Direction (Send, Receive)\n") - out:write(" | | .- Frames per second\n") - out:write(".-+-. | .---+-. 
Amount of frames compared:\n") - local chartWidth = 60 - local cntPrinted = 0 - for i, elem in ipairs(sorted) do - local port, pkgcnt = elem.port, elem.pkgcnt - local dir = (port > 100000)and("R")or("S") - if port > 100000 then port = port - 100000 end - if port > 30000 then goto nextPort end - local pkgsPerSec = math.floor((pkgcnt / dumpDurationSec)*10+.5)/10 - out:write(string.format("%5d %s %7.1f |", port, dir, pkgsPerSec)) - local barLen = pkgcnt / maxOccurValue - --local barLen = (math.log(pkgcnt) / math.log(maxOccurValue)) - for i=1, chartWidth-1 do - out:write((i < (barLen*chartWidth))and("=")or(" ")) - end - out:write("|\n") - cntPrinted = cntPrinted + 1 - if cntPrinted >= 20 then break end - ::nextPort:: - end - out:write("\n") -end - - -main() - diff --git a/src/main/lua/pcap/tcpPortStats.lua b/src/main/lua/pcap/tcpPortStats.lua new file mode 100644 index 0000000..9038db7 --- /dev/null +++ b/src/main/lua/pcap/tcpPortStats.lua @@ -0,0 +1,82 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) + +local out, log = io.stdout, io.stderr +local main, onPcapFrame, printStats + + +function main() + local app = { + parser = false, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + foundPortNumbers = {}, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printStats(app) +end + + +function onPcapFrame( app, it ) + local sec, usec = it:frameArrivalTime() + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + --local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr() + --local isTcp = (it:tcpSeqNr() ~= nil) + -- + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + if not app.foundPortNumbers[srcPort] then app.foundPortNumbers[srcPort] = 1 + else app.foundPortNumbers[srcPort] = app.foundPortNumbers[srcPort] + 1 end + if not app.foundPortNumbers[dstPort+100000] then app.foundPortNumbers[dstPort+100000] = 1 + else app.foundPortNumbers[dstPort+100000] = app.foundPortNumbers[dstPort+100000] + 1 end +end + + +function printStats( app ) + local sorted = {} + local totalPackets, maxOccurValue = 0, 0 + for port, pkgcnt in pairs(app.foundPortNumbers) do + if pkgcnt > maxOccurValue then maxOccurValue = pkgcnt end + table.insert(sorted, { port=port, pkgcnt=pkgcnt }) + totalPackets = totalPackets + pkgcnt + end + table.sort(sorted, function(a, b)return a.pkgcnt > b.pkgcnt end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write(string.format(" Subject TCP/UDP stats\n")) + out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) + out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) + out:write(string.format("Throughput %.1f packets per second\n", totalPackets / dumpDurationSec)) + out:write("\n") + out:write(" .- TCP/UDP Port\n") + out:write(" | .-Direction (Send, Receive)\n") + out:write(" | | .- Packets per second\n") + out:write(".-+-. 
| .---+-.\n") + local chartWidth = 60 + for i, elem in ipairs(sorted) do + local port, pkgcnt = elem.port, elem.pkgcnt + local dir = (port > 100000)and("R")or("S") + if port > 100000 then port = port - 100000 end + if port > 30000 then goto nextPort end + local pkgsPerSec = math.floor((pkgcnt / dumpDurationSec)*10+.5)/10 + out:write(string.format("%5d %s %7.1f |", port, dir, pkgsPerSec)) + local barLen = pkgcnt / maxOccurValue + --local barLen = (math.log(pkgcnt) / math.log(maxOccurValue)) + for i=1, chartWidth-1 do + out:write((i < (barLen*chartWidth))and("=")or(" ")) + end + out:write("|\n") + ::nextPort:: + end + out:write("\n") +end + + +main() + diff --git a/src/main/lua/pcap/xServiceStats.lua b/src/main/lua/pcap/xServiceStats.lua new file mode 100644 index 0000000..1cc5961 --- /dev/null +++ b/src/main/lua/pcap/xServiceStats.lua @@ -0,0 +1,90 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) + +local out, log = io.stdout, io.stderr +local main, onPcapFrame, vapourizeUrlVariables, printStats + + +function main() + local app = { + parser = false, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + services = {}, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printStats(app) +end + + +function onPcapFrame( app, it ) + local sec, usec = it:frameArrivalTime() + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + -- + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + local portsOfInterest = { + [ 80] = true, + [8080] = true, + [7012] = true, + } + --if not portsOfInterest[dstPort] and not portsOfInterest[srcPort] then return end + local trspPayload = it:trspPayload() + local httpReqLinePart1, httpReqLinePart2, httpReqLinePart3 = + trspPayload:match("^([A-Z/1.0]+) ([^ ]+) [^ \r\n]+\r?\n") + if not httpReqLinePart1 then return end + if httpReqLinePart1:find("^HTTP/1.%d$") then return end + --log:write(string.format("%5d->%5d %s %s %s\n", srcPort, dstPort, httpReqLinePart1, httpReqLinePart2, httpReqLinePart3)) + xService = trspPayload:match("\n[Xx]%-[Ss][Ee][Rr][Vv][Ii][Cc][Ee]:%s+([^\r\n]+)\r?\n"); + if not xService then return end + --log:write("X-Service is '".. xService .."'\n") + local obj = app.services[xService] + if not obj then + app.services[xService] = { + xService = xService, + count=0, + } + else + assert(xService == obj.xService) + obj.count = obj.count + 1 + end +end + + +function printStats( app ) + local sorted = {} + local maxOccurValue = 0 + local overallCount = 0 + for _, reqObj in pairs(app.services) do + if reqObj.count > maxOccurValue then maxOccurValue = reqObj.count end + overallCount = overallCount + reqObj.count + table.insert(sorted, reqObj) + end + table.sort(sorted, function(a, b)return a.count > b.count end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write(string.format(" Subject Pressure by Services\n")) + out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) + out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) + out:write(string.format("Throughput %.1f HTTP requests per second\n", overallCount / dumpDurationSec)) + out:write("\n") + out:write(" .-- HTTP Requests per Second\n") + out:write(" | .-- Service\n") + out:write(".-+---. 
.-+-----\n") + for i, elem in ipairs(sorted) do + local xService, count = elem.xService, elem.count + local countPerSecond = math.floor((count / dumpDurationSec)*10+.5)/10 + out:write(string.format("%7.1f %s\n", countPerSecond, xService)) + end + out:write("\n") +end + + +main() + -- cgit v1.1 From 29ff6f228cbf6aafef687c5bf4b2986ff0a30fba Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 31 Oct 2023 17:24:46 +0100 Subject: Add more kludge to hopefully ban unwanted java loggers. --- doc/note/links/links.txt | 4 + doc/note/maven-pom/howto-ban-unwanted-sh__.txt | 41 ++++++++++ src/main/patch/houston/default.patch | 104 +++++++++++++++++++++++++ 3 files changed, 149 insertions(+) create mode 100644 doc/note/maven-pom/howto-ban-unwanted-sh__.txt diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index f565a26..b0c97e5 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -243,6 +243,7 @@ Links (Aka argument amplifiers) - [Houston Optimize EnqueuePatrol](https://jira.post.ch/browse/SDCISA-2876) - [Update beim Fahrzeughersteller dauert zu lange](https://jira.post.ch/browse/SDCISA-9059) - [vortex too slow](https://jira.post.ch/browse/SDCISA-9990) +- [2023-10-27 OOM nun auch auf Eagle](https://wikit.post.ch/x/c2U1Tw) ## Performance is not an issue ... - [Houston OOM 2023-06-27](https://wikit.post.ch/x/_Bv6Rw) @@ -537,3 +538,6 @@ Links (Aka argument amplifiers) ## Qemu is Crap - [Qemu for Windows Host Quirks](https://wonghoi.humgar.com/blog/2021/05/03/qemu-for-windows-host-quirks/) +## Git paisa complain about rebase stuff +- [Complain about force-pushes](https://gitit.post.ch/projects/ISA/repos/lazlar/pull-requests/3/overview?commentId=311142) + diff --git a/doc/note/maven-pom/howto-ban-unwanted-sh__.txt b/doc/note/maven-pom/howto-ban-unwanted-sh__.txt new file mode 100644 index 0000000..1edad9b --- /dev/null +++ b/doc/note/maven-pom/howto-ban-unwanted-sh__.txt @@ -0,0 +1,41 @@ + + + + + + org.apache.logging.log4j + log4j-slf4j-impl + [0.0.0,) + provided + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + 3.4.1 + + + enforce + + + + + org.apache.logging.log4j:log4j-slf4j-impl + + + + + + + + + + + +## Sources + +- [How to globally exclude mvn dependency](https://stackoverflow.com/a/39979760/4415884) +- [How to yell about bannded dependencies](https://stackoverflow.com/a/46104531/4415884) + diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch index d70b12b..4fcfa8c 100644 --- a/src/main/patch/houston/default.patch +++ b/src/main/patch/houston/default.patch @@ -72,3 +72,107 @@ index 432efb01..d1729fe9 100644 RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with() +diff --git a/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java +new file mode 100644 +index 00000000..aa3aa2e0 +--- /dev/null ++++ b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java +@@ -0,0 +1,98 @@ ++package org.apache.logging.slf4j; ++ ++import org.apache.logging.log4j.spi.ExtendedLogger; ++import org.slf4j.Marker; ++import org.slf4j.event.Level; ++import org.slf4j.spi.LocationAwareLogger; ++import org.slf4j.spi.LoggingEventBuilder; ++ ++import java.io.Serializable; ++ ++ ++/**
++ * FU** this fu***** damn sh** code that still tries to use log4j, no matter
++ * how strongly we tell it NOT to use it!
++ *
++ * This class only exists to prevent services from starting if IDEA has
++ * missed the dependency changes in the pom and still tries to use the
++ * wrong logger impl.
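++ *
++ * Every logging method below simply delegates to the org.slf4j.simple
++ * SimpleLogger created in the constructor, so whatever still binds to this
++ * log4j adapter ends up on the simple logger anyway.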
*/ ++public class Log4jLogger implements LocationAwareLogger, Serializable { ++ ++ private final org.slf4j.Logger log; ++ ++ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) { ++ this.log = new org.slf4j.simple.SimpleLoggerFactory().getLogger(name); ++ } ++ ++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) { ++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet"); ++ } ++ ++ @Override public String getName() { return log.getName(); } ++ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); } ++ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); } ++ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); } ++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); } ++ @Override public void trace(String s) { log.trace(s); } ++ @Override public void trace(String s, Object o) { log.trace(s, o); } ++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); } ++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); } ++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); } ++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); } ++ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); } ++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); } ++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); } ++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); } ++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); } ++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); } ++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); } ++ @Override public void debug(String s) { log.debug(s); } ++ @Override public void debug(String s, Object o) { log.debug(s, o); } ++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); } ++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); } ++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); } ++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); } ++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); } ++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); } ++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); } ++ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); } ++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); } ++ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); } ++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); } ++ @Override public void info(String s) { log.info(s); } ++ @Override public void info(String s, Object o) { log.info(s, o); } ++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); } ++ @Override public void info(String s, Object... 
objects) { log.info(s, objects); } ++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); } ++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); } ++ @Override public void info(Marker marker, String s) { log.info(marker, s); } ++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); } ++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); } ++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); } ++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); } ++ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); } ++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); } ++ @Override public void warn(String s) { log.warn(s); } ++ @Override public void warn(String s, Object o) { log.warn(s, o); } ++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); } ++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); } ++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); } ++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); } ++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); } ++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); } ++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); } ++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); } ++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); } ++ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); } ++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); } ++ @Override public void error(String s) { log.error(s); } ++ @Override public void error(String s, Object o) { log.error(s, o); } ++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); } ++ @Override public void error(String s, Object... objects) { log.error(s, objects); } ++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); } ++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); } ++ @Override public void error(Marker marker, String s) { log.error(marker, s); } ++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); } ++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); } ++ @Override public void error(Marker marker, String s, Object... 
objects) { log.error(marker, s, objects); } ++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } ++ @Override public LoggingEventBuilder atError() { return log.atError(); } ++ ++} -- cgit v1.1 From b8bce299375d44feee4c159c8c86a6d16417452d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 31 Oct 2023 17:35:05 +0100 Subject: Backup preflux patch --- src/main/patch/preflux/default.patch | 85 ++++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 src/main/patch/preflux/default.patch diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch new file mode 100644 index 0000000..acbcf0f --- /dev/null +++ b/src/main/patch/preflux/default.patch @@ -0,0 +1,85 @@ + + TODO describe (like in houston) + + +diff --git a/preflux-web/pom.xml b/preflux-web/pom.xml +index 752be702..8f91c053 100644 +--- a/preflux-web/pom.xml ++++ b/preflux-web/pom.xml +@@ -14,6 +14,26 @@ + war + + ++ ++ org.slf4j ++ slf4j-api ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ slf4j-simple ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ ${slf4j.version} ++ + + + ch.post.it.paisa.alice +diff --git a/preflux-test/pom.xml b/preflux-test/pom.xml +index c50afbe5..115556c4 100644 +--- a/preflux-test/pom.xml ++++ b/preflux-test/pom.xml +@@ -16,6 +16,26 @@ + + + ++ ++ org.slf4j ++ slf4j-api ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ slf4j-simple ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jcl-over-slf4j ++ ${slf4j.version} ++ ++ ++ org.slf4j ++ jul-to-slf4j ++ ${slf4j.version} ++ + + + ch.post.it.paisa.alice +diff --git a/preflux-web/package.json b/preflux-web/package.json +index eda8e051..5353e179 100644 +--- a/preflux-web/package.json ++++ b/preflux-web/package.json +@@ -10,10 +10,10 @@ + "check": "npm run format:check && npm run lint && npm run test:no-watch", + "check:ci": "npm run format:check && npm run lint", + "check:fix": "npm run format:fix && npm run lint:fix && npm run test:no-watch", +- "format:check": "prettier --check \"src/main/angular/**/*.{ts,html,css,json}\"", +- "format:fix": "prettier --write \"src/main/angular/**/*.{ts,html,css,json}\"", +- "lint": "ng lint --format verbose", +- "lint:fix": "ng lint --fix", ++ "format:check": "true", ++ "format:fix": "true", ++ "lint": "true", ++ "lint:fix": "true", + "test": "ng test --watch=true --browsers=ChromeHeadlessNoSandbox", + "test:ci": "npm run test:no-watch", + "test:no-watch": "ng test --watch=false --browsers=ChromeHeadlessNoSandbox" -- cgit v1.1 From d813e8b7ff3f795ab4bdb0bf7a8a22b872a7c5e7 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 31 Oct 2023 19:06:41 +0100 Subject: Fix preflux patch for changes on develop --- src/main/patch/preflux/default.patch | 127 ++++++++++++++++++++++++++++++++++- 1 file changed, 124 insertions(+), 3 deletions(-) diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch index acbcf0f..b3531b4 100644 --- a/src/main/patch/preflux/default.patch +++ b/src/main/patch/preflux/default.patch @@ -74,12 +74,133 @@ index eda8e051..5353e179 100644 "check:fix": "npm run format:fix && npm run lint:fix && npm run test:no-watch", - "format:check": "prettier --check \"src/main/angular/**/*.{ts,html,css,json}\"", - "format:fix": "prettier --write \"src/main/angular/**/*.{ts,html,css,json}\"", -- "lint": "ng lint --format verbose", +- "lint": "ng lint", - "lint:fix": "ng lint --fix", + 
"format:check": "true", + "format:fix": "true", + "lint": "true", + "lint:fix": "true", - "test": "ng test --watch=true --browsers=ChromeHeadlessNoSandbox", + "test": "ng test --watch --browsers=ChromeHeadlessNoSandbox", "test:ci": "npm run test:no-watch", - "test:no-watch": "ng test --watch=false --browsers=ChromeHeadlessNoSandbox" + "test:no-watch": "ng test --no-watch --browsers=ChromeHeadlessNoSandbox" +diff --git a/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java +new file mode 100644 +index 00000000..e437dc34 +--- /dev/null ++++ b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java +@@ -0,0 +1,115 @@ ++package org.apache.logging.slf4j; ++ ++import org.apache.logging.log4j.spi.ExtendedLogger; ++import org.slf4j.Logger; ++import org.slf4j.Marker; ++import org.slf4j.spi.LocationAwareLogger; ++ ++import java.io.Serializable; ++import java.lang.reflect.Constructor; ++import java.lang.reflect.InvocationTargetException; ++import java.lang.reflect.Method; ++ ++ ++/**
++ * This class only exists to really, really, really, really use the wanted
++ * logger impl. And only the one chosen logger impl and no other log impl.
++ * In fact there should be no reason for this class to exist. But it seems
++ * that some code still manages to stubbornly use some unwanted logger impls
++ * occasionally, for whatever reason. As it seems impossible to configure
++ * this properly, this class at least makes it fail fast, before devs waste
++ * time searching for expected logs which magically never appear.
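++ *
++ * The constructor obtains org.slf4j.simple.SimpleLoggerFactory via
++ * reflection (presumably to avoid a hard compile-time dependency on the
++ * simple-logger internals) and every logging method below just delegates
++ * to the logger obtained from that factory.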
++ */ ++public class Log4jLogger implements LocationAwareLogger, Serializable { ++ ++ private static final Method getLoggerFn; ++ private static final Object loggerFactory; ++ private final Logger log; ++ ++ static { ++ try { ++ Class slfClass = Class.forName("org.slf4j.simple.SimpleLoggerFactory"); ++ getLoggerFn = slfClass.getDeclaredMethod("getLogger", String.class); ++ Constructor ctor = slfClass.getConstructor(); ++ ctor.setAccessible(true); ++ loggerFactory = ctor.newInstance(); ++ } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | InstantiationException | ++ IllegalAccessException ex) { ++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex); ++ } ++ } ++ ++ Log4jLogger(Object markerFactory, ExtendedLogger logger, final String name) { ++ try { ++ this.log = (Logger) getLoggerFn.invoke(loggerFactory, name); ++ } catch (InvocationTargetException | IllegalAccessException ex) { ++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex); ++ } ++ } ++ ++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) { ++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet"); ++ } ++ ++ @Override public String getName() { return log.getName(); } ++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); } ++ @Override public void trace(String s) { log.trace(s); } ++ @Override public void trace(String s, Object o) { log.trace(s, o); } ++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); } ++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); } ++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); } ++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); } ++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); } ++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); } ++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); } ++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); } ++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); } ++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); } ++ @Override public void debug(String s) { log.debug(s); } ++ @Override public void debug(String s, Object o) { log.debug(s, o); } ++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); } ++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); } ++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); } ++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); } ++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); } ++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); } ++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); } ++ @Override public void debug(Marker marker, String s, Object... 
objects) { log.debug(marker, s, objects); } ++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); } ++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); } ++ @Override public void info(String s) { log.info(s); } ++ @Override public void info(String s, Object o) { log.info(s, o); } ++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); } ++ @Override public void info(String s, Object... objects) { log.info(s, objects); } ++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); } ++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); } ++ @Override public void info(Marker marker, String s) { log.info(marker, s); } ++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); } ++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); } ++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); } ++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); } ++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); } ++ @Override public void warn(String s) { log.warn(s); } ++ @Override public void warn(String s, Object o) { log.warn(s, o); } ++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); } ++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); } ++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); } ++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); } ++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); } ++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); } ++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); } ++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); } ++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); } ++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); } ++ @Override public void error(String s) { log.error(s); } ++ @Override public void error(String s, Object o) { log.error(s, o); } ++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); } ++ @Override public void error(String s, Object... objects) { log.error(s, objects); } ++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); } ++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); } ++ @Override public void error(Marker marker, String s) { log.error(marker, s); } ++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); } ++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); } ++ @Override public void error(Marker marker, String s, Object... 
objects) { log.error(marker, s, objects); } ++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } ++ ++} -- cgit v1.1 From 78d653c30ed601fe5eca93f02899080a758df54a Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 31 Oct 2023 21:36:07 +0100 Subject: Add some more pcap stats. --- src/main/lua/pcap/extractDnsHosts.lua | 147 +++++++++++++++++++++++++++++++ src/main/lua/pcap/tcpDataAmountStats.lua | 97 ++++++++++++++++++++ src/main/lua/pcap/xServiceStats.lua | 8 +- 3 files changed, 248 insertions(+), 4 deletions(-) create mode 100644 src/main/lua/pcap/extractDnsHosts.lua create mode 100644 src/main/lua/pcap/tcpDataAmountStats.lua diff --git a/src/main/lua/pcap/extractDnsHosts.lua b/src/main/lua/pcap/extractDnsHosts.lua new file mode 100644 index 0000000..655586f --- /dev/null +++ b/src/main/lua/pcap/extractDnsHosts.lua @@ -0,0 +1,147 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) +local out, log = io.stdout, io.stderr + +local main, onPcapFrame, vapourizeUrlVariables, printResult + + +function main() + local app = { + parser = false, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + dnsResponses = {}, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printResult(app) +end + + +function onPcapFrame( app, it ) + local out = io.stdout + local sec, usec = it:frameArrivalTime() + sec = sec + (usec/1e6) + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + if it:trspSrcPort() == 53 then + extractHostnameFromDns(app, it) + elseif it:tcpSeqNr() then + extractHostnameFromHttpHeaders(app, it) + end +end + + +function extractHostnameFromDns( app, it ) + local payload = it:trspPayload() + local bug = 8 -- TODO looks as lib has a bug and payload is offset by some bytes. + local dnsFlags = (payload:byte(bug+3) << 8) | (payload:byte(bug+4)) + if (dnsFlags & 0x0004) ~= 0 then return end -- ignore error responses + local numQuestions = payload:byte(bug+5) << 8 | payload:byte(bug+6) + local numAnswers = payload:byte(bug+7) << 8 | payload:byte(bug+8) + if numQuestions ~= 1 then + log:write("[WARN ] numQuestions ".. numQuestions .."?!?\n") + return + end + if numAnswers == 0 then return end -- empty answers are boring + if numAnswers ~= 1 then log:write("[WARN ] dns.count.answers ".. numAnswers .." not supported\n") return end + local questionsOffset = bug+13 + local hostname = payload:match("^([^\0]+)", questionsOffset) + hostname = hostname:gsub("^[\r\n]", "") -- TODO WTF?!? + hostname = hostname:gsub("[\x04\x02]", ".") -- TODO WTF?!? + local answersOffset = bug + 13 + (24 * numQuestions) + local ttl = payload:byte(answersOffset+6) << 24 | payload:byte(answersOffset+7) << 16 + | payload:byte(answersOffset+8) << 8 | payload:byte(answersOffset+9) + local dataLen = payload:byte(answersOffset+10) | payload:byte(answersOffset+11) + if dataLen ~= 4 then log:write("[WARN ] dns.resp.len ".. dataLen .." 
not impl\n") return end + local ipv4Str = string.format("%d.%d.%d.%d", payload:byte(answersOffset+12), payload:byte(answersOffset+13), + payload:byte(answersOffset+14), payload:byte(answersOffset+15)) + -- + addEntry(app, ipv4Str, hostname, ttl) +end + + +function extractHostnameFromHttpHeaders( app, it ) + local payload = it:trspPayload() + local _, beg = payload:find("^([A-Z]+ [^ \r\n]+ HTTP/1%.%d\r?\n)") + if not beg then return end + beg = beg + 1 + local httpHost + while true do + local line + local f, t = payload:find("^([^\r\n]+)\r?\n", beg) + if not f then return end + if not payload:byte(1) == 0x72 or payload:byte(1) == 0x68 then goto nextHdr end + line = payload:sub(f, t) + httpHost = line:match("^[Hh][Oo][Ss][Tt]:%s*([^\r\n]+)\r?\n$") + if not httpHost then goto nextHdr end + break + ::nextHdr:: + beg = t + end + httpHost = httpHost:gsub("^(.+):%d+$", "%1") + local dstIp = it:netDstIpStr() + if dstIp == httpHost then return end + addEntry(app, dstIp, httpHost, false, "via http host header") +end + + +function addEntry( app, ipv4Str, hostname, ttl, kludge ) + local key + --log:write("addEntry(app, ".. ipv4Str ..", ".. hostname ..")\n") + if kludge == "via http host header" then + key = ipv4Str .."\0".. hostname .."\0".. "via http host header" + else + key = ipv4Str .."\0".. hostname .."\0".. ttl + end + local entry = app.dnsResponses[key] + if not entry then + entry = { ipv4Str = ipv4Str, hostname = hostname, ttl = ttl, } + app.dnsResponses[key] = entry + end +end + + +function printResult( app ) + local sorted = {} + for _, stream in pairs(app.dnsResponses) do + table.insert(sorted, stream) + end + table.sort(sorted, function(a, b) + if a.ipv4Str < b.ipv4Str then return true end + if a.ipv4Str > b.ipv4Str then return false end + return a.hostname < b.hostname + end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write(string.format("# Subject Hostname to IP addresses\n")) + out:write(string.format("# Begin %s\n", os.date(timeFmt, math.floor(app.oldestEpochSec)))) + out:write(string.format("# Duration %.3f seconds\n", dumpDurationSec)) + out:write("\n") + --out:write(" .-- KiB per Second\n") + --out:write(" | .-- IP endpoints\n") + --out:write(" | | .-- TCP server port\n") + --out:write(" | | | .-- TCP Payload (less is better)\n") + --out:write(" | | | |\n") + --out:write(".--+----. .----+----------------------. .+--. 
.-+------------\n") + for i, elem in ipairs(sorted) do + local ipv4Str, hostname, ttl = elem.ipv4Str, elem.hostname, elem.ttl + if ttl then + out:write(string.format("%-14s %-30s # TTL=%ds", ipv4Str, hostname, ttl)) + else + out:write(string.format("%-14s %-30s # ", ipv4Str, hostname)) + end + out:write("\n") + end + out:write("\n") +end + + +main() + + diff --git a/src/main/lua/pcap/tcpDataAmountStats.lua b/src/main/lua/pcap/tcpDataAmountStats.lua new file mode 100644 index 0000000..496687a --- /dev/null +++ b/src/main/lua/pcap/tcpDataAmountStats.lua @@ -0,0 +1,97 @@ + +local newPcapParser = assert(require("pcapit").newPcapParser) + +local main, onPcapFrame, vapourizeUrlVariables, printResult + + +function main() + local app = { + parser = false, + youngestEpochSec = -math.huge, + oldestEpochSec = math.huge, + nextStreamNr = 1, + httpStreams = {}, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.parser:resume() + printResult(app) +end + + +function onPcapFrame( app, it ) + local out = io.stdout + -- + if not it:tcpSeqNr() then return end + -- + -- + local sec, usec = it:frameArrivalTime() + if sec < app.oldestEpochSec then app.oldestEpochSec = sec end + if sec > app.youngestEpochSec then app.youngestEpochSec = sec end + -- + local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr() + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + local lowIp = (srcIp < dstIp)and(srcIp)or(dstIp) + local higIp = (lowIp == dstIp)and(srcIp)or(dstIp) + local lowPort = math.min(srcPort, dstPort) + local streamId = lowIp .."\0".. higIp .."\0".. lowPort + local stream = app.httpStreams[streamId] + if not stream then + stream = { + srcIp = srcIp, dstIp = dstIp, srcPort = srcPort, dstPort = dstPort, + streamNr = app.nextStreamNr, numBytes = 0, + } + app.nextStreamNr = app.nextStreamNr + 1 + app.httpStreams[streamId] = stream + end + local trspPayload = it:trspPayload() + stream.numBytes = stream.numBytes + trspPayload:len() +end + + +function printResult( app ) + local out = io.stdout + local sorted = {} + local overalValue, maxValue = 0, 0 + for _, stream in pairs(app.httpStreams) do + if stream.numBytes > maxValue then maxValue = stream.numBytes end + overalValue = overalValue + stream.numBytes + table.insert(sorted, stream) + end + table.sort(sorted, function(a, b)return a.numBytes > b.numBytes end) + local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec + local overallBytesPerSec = overalValue / dumpDurationSec + local maxValuePerSec = maxValue / dumpDurationSec + local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" + out:write("\n") + out:write(string.format(" Subject TCP data throughput\n")) + out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) + out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) + out:write(string.format(" Overall %.3f KiB per second (%.3f KiBit per second)\n", + overallBytesPerSec/1024, overallBytesPerSec/1024*8)) + out:write("\n") + out:write(" .-- KiB per Second\n") + out:write(" | .-- IP endpoints\n") + out:write(" | | .-- TCP server port\n") + out:write(" | | | .-- TCP Payload (less is better)\n") + out:write(" | | | |\n") + out:write(".--+----. .----+----------------------. .+--. 
.-+------------\n") + local bar = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" + for i, elem in ipairs(sorted) do + local streamNr, srcIp, dstIp, srcPort, dstPort, numBytes = + elem.streamNr, elem.srcIp, elem.dstIp, elem.srcPort, elem.dstPort, elem.numBytes + local lowPort = math.min(srcPort, dstPort) + local bytesPerSecond = math.floor((numBytes / dumpDurationSec)*10+.5)/10 + out:write(string.format("%9.3f %-14s %-14s %5d ", bytesPerSecond/1024, srcIp, dstIp, lowPort)) + local part = bytesPerSecond / maxValuePerSec; + out:write(bar:sub(0, math.floor(part * bar:len()))) + out:write("\n") + end + out:write("\n") +end + + +main() + diff --git a/src/main/lua/pcap/xServiceStats.lua b/src/main/lua/pcap/xServiceStats.lua index 1cc5961..3bc94a4 100644 --- a/src/main/lua/pcap/xServiceStats.lua +++ b/src/main/lua/pcap/xServiceStats.lua @@ -69,10 +69,10 @@ function printStats( app ) local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec local timeFmt = "!%Y-%m-%d_%H:%M:%SZ" out:write("\n") - out:write(string.format(" Subject Pressure by Services\n")) - out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) - out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) - out:write(string.format("Throughput %.1f HTTP requests per second\n", overallCount / dumpDurationSec)) + out:write(string.format(" Subject Pressure by Services\n")) + out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec))) + out:write(string.format(" Duration %d seconds\n", dumpDurationSec)) + out:write(string.format("Matching Requests %.1f (HTTP requests per second)\n", overallCount / dumpDurationSec)) out:write("\n") out:write(" .-- HTTP Requests per Second\n") out:write(" | .-- Service\n") -- cgit v1.1 From 14d5192d803fa5850ef1631f51d7d4b3ff497622 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 1 Nov 2023 12:15:38 +0100 Subject: dump --- src/main/lua/brgmt-logs/DigBrgmtLogs.lua | 5 +++++ src/main/lua/pcap/httpStats.lua | 5 ++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 src/main/lua/brgmt-logs/DigBrgmtLogs.lua diff --git a/src/main/lua/brgmt-logs/DigBrgmtLogs.lua b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua new file mode 100644 index 0000000..fb1f036 --- /dev/null +++ b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua @@ -0,0 +1,5 @@ +-- +-- NOTHING HERE +-- +-- See "brgmt-beef/scripts/". Instead. 
+-- diff --git a/src/main/lua/pcap/httpStats.lua b/src/main/lua/pcap/httpStats.lua index e4a3aaa..ff48bd2 100644 --- a/src/main/lua/pcap/httpStats.lua +++ b/src/main/lua/pcap/httpStats.lua @@ -1,15 +1,16 @@ local newPcapParser = assert(require("pcapit").newPcapParser) +local out, log = io.stdout, io.stderr local main, onPcapFrame, vapourizeUrlVariables, printHttpRequestStats function main() local app = { parser = false, - foundHttpRequests = {}, youngestEpochSec = -math.huge, oldestEpochSec = math.huge, + foundHttpRequests = {}, } app.parser = newPcapParser{ dumpFilePath = "-", @@ -21,7 +22,6 @@ end function onPcapFrame( app, it ) - local out = io.stdout local sec, usec = it:frameArrivalTime() local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() -- @@ -80,7 +80,6 @@ end function printHttpRequestStats( app ) - local out = io.stdout local sorted = {} local maxOccurValue = 0 local overallCount = 0 -- cgit v1.1 From 3f03f7ba9e443886a141068c04b448b134ff6886 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 1 Nov 2023 22:56:59 +0100 Subject: Tried to run SQLite scripts from file via php. Bad luck, seems as php cannot do it. --- src/main/php/sqlite-exec.php | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 src/main/php/sqlite-exec.php diff --git a/src/main/php/sqlite-exec.php b/src/main/php/sqlite-exec.php new file mode 100644 index 0000000..8df2fe0 --- /dev/null +++ b/src/main/php/sqlite-exec.php @@ -0,0 +1,30 @@ +srcPath); + if( !$lotsOfSql ) throw new Exception("fopen(\"{$app->srcPath}\")"); + $app->db = new SQLite3($app->dstPath); + if( !$app->db ) throw new Exception("SQLite3(\"{$app->dstPath}\")"); + $db = $app->db; + $db->enableExceptions(true); + $st = $db->prepare($lotsOfSql); + $st->execute(); + $st->close(); +} + + +function main(){ + $app = (object)array( + "srcPath" => NULL/*TODO set me*/, + "dstPath" => NULL/*TODO set me*/, + "srcFile" => NULL, + "db" => NULL, + ); + run($app); +} + + +main(); -- cgit v1.1 From a1cdf167a9c1eea7becb332498b83ec68b56aa7e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 2 Nov 2023 19:39:02 +0100 Subject: Add notes while analyzing an OOM in production. 
--- doc/note/openshift/dbg-mem-issues.txt | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 doc/note/openshift/dbg-mem-issues.txt diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt new file mode 100644 index 0000000..4babf7a --- /dev/null +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -0,0 +1,32 @@ + +How to hunt memory issues in production +======================================= + +SVCNAME=foo-prod +PID=42 +OC="oc -n whatever" + + +${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- pmap 9 > "${SVCNAME:?}"-pmap-$(date -u +%Y%m%d-%H%M%S).txt + + +${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- sh -c 'true \ + && printf '\''%s\n'\'' "$(sed '\''s;^(.*)$;FOO;'\'' /proc/'${PID:?}'/smaps)" \ + ' + + +true \ + && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \ + && printf '\''h;PageSize;%s\n'\'' $(getconf PAGESIZE) \ + && printf '\''c;%-24s;%8s;%8s;%8s;%5s;%4s;%3s;%8s;%3s;%7s\n'\'' When nThrds size RSS SHR text lib data dt nFds \ + && while true; do true \ + && printf '\''r;%s;%8s;%8d;%8d;%5d;%4d;%3d;%8d;%3d;%7d\n'\'' \ + "$(date -Is)" \ + $(cat /proc/'${PID:?}'/stat|cut -d" " -f20) \ + $(cat /proc/'${PID:?}'/statm) \ + $(ls -1 /proc/9/fd | wc -l) \ + && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break; done' \ + | tee "${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv + + + -- cgit v1.1 From da4b60cd0e7711f17055d8896f1fb5e820787c24 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 3 Nov 2023 15:18:04 +0100 Subject: Add some OOM notes. --- doc/note/openshift/dbg-mem-issues.txt | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 4babf7a..c7359fd 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -2,19 +2,20 @@ How to hunt memory issues in production ======================================= -SVCNAME=foo-prod -PID=42 -OC="oc -n whatever" +true \ + && SVCNAME=foo-prod \ + && PID=9 \ + && OC="oc -n foo" \ + && MemLeakTry1="lua -W MemLeakTry1.lua" \ + && true ${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- pmap 9 > "${SVCNAME:?}"-pmap-$(date -u +%Y%m%d-%H%M%S).txt - ${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- sh -c 'true \ && printf '\''%s\n'\'' "$(sed '\''s;^(.*)$;FOO;'\'' /proc/'${PID:?}'/smaps)" \ ' - true \ && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \ && printf '\''h;PageSize;%s\n'\'' $(getconf PAGESIZE) \ @@ -28,5 +29,21 @@ true \ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break; done' \ | tee "${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv +true \ + && ONE="houston-prod-pmap-20231102-163425.txt" \ + && TWO="houston-prod-pmap-20231103-074301.txt" \ + && diff -U0 "${ONE:?}" "${TWO:?}" | egrep '^\+' | sed -r 's_\+([^ ]+) .*$_\1_'|sort|uniq \ + && true + +(true \ + && for F in $(ls *pmap*.txt); do true \ + && printf "$F\n" \ + && DATE="$(date +%s -d "$(echo $F|sed -r 's_.*([0-9]{4})([0-9]{2})([0-9]{2})-([0-9]{2})([0-9]{2})([0-9]{2}).*_\1-\2-\3T\4:\5:\6Z_')")" \ + && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \ + ;done) + +true \ + && cat houston-prod-pmap-*.csv > houston-prod-pmapAll.csv \ + && true -- cgit v1.1 From a709bdd16b363fcb704961a26e54e11d5d103241 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 3 Nov 2023 
23:52:44 +0100 Subject: Add doc how to ffmpeg concat --- doc/note/ffmpeg/ffmpeg.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/note/ffmpeg/ffmpeg.txt b/doc/note/ffmpeg/ffmpeg.txt index 072faa7..2b18a44 100644 --- a/doc/note/ffmpeg/ffmpeg.txt +++ b/doc/note/ffmpeg/ffmpeg.txt @@ -45,6 +45,13 @@ Use -codec:v copy to keep video, or -codec:v no for audio-only. -i INFILE -ss -to OUTFILE +## Concatenate + + && ffmpeg -i one.mkv -i two.mkv \ + -filter_complex '[0:v] [0:a] [1:v] [1:a] concat=n=2:v=1:a=1 [v] [a]' \ + -map "[v]" -map "[a]" out.mkv \ + + ## Rotate Portrait -i INFILE -vf "transpose=2" OUTFILE -- cgit v1.1 From 8604ad484d89df24ad021c11aedcfa67b55c5d4f Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sat, 4 Nov 2023 13:48:38 +0100 Subject: How to convert pdf to png --- doc/note/pdf/pdfToPng.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/note/pdf/pdfToPng.txt diff --git a/doc/note/pdf/pdfToPng.txt b/doc/note/pdf/pdfToPng.txt new file mode 100644 index 0000000..234b1ea --- /dev/null +++ b/doc/note/pdf/pdfToPng.txt @@ -0,0 +1,3 @@ + + && pdftoppm -f 1 -t 1 -png input.pdf > output.pdf \ + -- cgit v1.1 From bf32886848adf2a455fe0533e57632fd6b8f13eb Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 4 Nov 2023 23:18:23 +0100 Subject: Migrated some dockerfiles to portable POSIX qemu scripts. --- doc/note/qemu/setup-android-env.txt | 38 ++++ doc/note/qemu/setup-jni-env.txt | 22 +++ doc/note/qemu/setup-jre8-env.txt | 18 ++ doc/note/qemu/setup-maven-env.txt | 16 ++ doc/note/qemu/setup-nginx-env.txt | 48 +++++ src/main/c/PcapOne/PcapOne.c | 311 --------------------------------- src/main/docker/android-dev.Dockerfile | 44 ----- src/main/docker/gcc-windoof.Dockerfile | 233 ------------------------ src/main/docker/gcc.Dockerfile | 220 ----------------------- src/main/docker/gxx.Dockerfile | 17 -- src/main/docker/jni.Dockerfile | 20 --- src/main/docker/jre8.Dockerfile | 27 --- src/main/docker/maven.Dockerfile | 35 ---- src/main/docker/nginx.Dockerfile | 50 ------ src/main/docker/zlib-deb.Dockerfile | 49 ------ src/main/docker/zlib-mingw.Dockerfile | 51 ------ 16 files changed, 142 insertions(+), 1057 deletions(-) create mode 100644 doc/note/qemu/setup-android-env.txt create mode 100644 doc/note/qemu/setup-jni-env.txt create mode 100644 doc/note/qemu/setup-jre8-env.txt create mode 100644 doc/note/qemu/setup-maven-env.txt create mode 100644 doc/note/qemu/setup-nginx-env.txt delete mode 100644 src/main/c/PcapOne/PcapOne.c delete mode 100644 src/main/docker/android-dev.Dockerfile delete mode 100644 src/main/docker/gcc-windoof.Dockerfile delete mode 100644 src/main/docker/gcc.Dockerfile delete mode 100644 src/main/docker/gxx.Dockerfile delete mode 100644 src/main/docker/jni.Dockerfile delete mode 100644 src/main/docker/jre8.Dockerfile delete mode 100644 src/main/docker/maven.Dockerfile delete mode 100644 src/main/docker/nginx.Dockerfile delete mode 100644 src/main/docker/zlib-deb.Dockerfile delete mode 100644 src/main/docker/zlib-mingw.Dockerfile diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt new file mode 100644 index 0000000..97f8801 --- /dev/null +++ b/doc/note/qemu/setup-android-env.txt @@ -0,0 +1,38 @@ +# +# Tools for Android development. +# +# HINT: Migration to qemu not yet tested. 
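+#
+# HINT: Untested idea to smoke-test the install afterwards:
+#
+#   aapt version && sdkmanager --list >/dev/null && echo OK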
+# +set -e + +### Made for debian 10 (alias buster) +true \ + && PKGS_TO_ADD="curl unzip openjdk-11-jdk-headless aapt apksigner zipalign" \ + && PKGS_TO_DEL="curl unzip" \ + && PKGINIT="apt-get update" \ + && PKGADD="apt-get install -y --no-install-recommends" \ + && PKGDEL="apt-get purge -y" \ + && PKGCLEAN="apt-get clean" \ + && PLATFORM_VERSION="22" \ + && BUILD_TOOLS_VERSION="22.0.1" \ + && CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip" \ + \ + && export ANDROID_HOME="/usr/lib/android-sdk" \ + && export PATH="$PATH:/usr/lib/android-sdk/build-tools/debian:/usr/lib/android-sdk/cmdline-tools/latest/bin:/usr/lib/android-sdk/build-tools/$BUILD_TOOLS_VERSION" \ + \ + && $PKGINIT \ + && $PKGADD $PKGS_TO_ADD \ + && (cd /var/tmp && curl -sSLO "${CMDLINETOOLS_URL:?}") \ + && if test -x /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \ + && (cd /tmp && unzip /var/tmp/$(basename "$CMDLINETOOLS_URL") >/dev/null) \ + && mkdir /usr/lib/android-sdk/cmdline-tools \ + && mkdir /usr/lib/android-sdk/cmdline-tools/latest \ + && mv /tmp/cmdline-tools/* /usr/lib/android-sdk/cmdline-tools/latest/. \ + && yes | sdkmanager --install "platforms;android-${PLATFORM_VERSION:?}" "build-tools;${BUILD_TOOLS_VERSION:?}" \ + && `# Those for some reason are broken (wrong linker) so use the debian variant.` \ + && (cd "/usr/lib/android-sdk/build-tools/${BUILD_TOOLS_VERSION:?}" && rm aapt zipalign) \ + && $PKGDEL $PKGS_TO_DEL \ + && $PKGCLEAN \ + && rm -rf /tmp/* \ + && true + diff --git a/doc/note/qemu/setup-jni-env.txt b/doc/note/qemu/setup-jni-env.txt new file mode 100644 index 0000000..62d204a --- /dev/null +++ b/doc/note/qemu/setup-jni-env.txt @@ -0,0 +1,22 @@ + +# +# Debian with tools for java-native-interface development. +# +# HINT: Since migration from docker, I switched it to jdk-17 (because 11 +# seems no longer available). This change is not yet tested. +# +set -e + + +### For debian 12 +true \ + && SUDO= \ + && export JAVA_HOME="/usr/lib/jvm/java-17-openjdk-amd64" \ + && $SUDO apt update \ + && $SUDO apt install -y --no-install-recommends \ + g++ make openjdk-17-jdk-headless \ + && (printf '%s\n' "JAVA_HOME=${JAVA_HOME:?}" \ + ) | $SUDO tee "/etc/environment" >/dev/null \ + && $SUDO apt clean \ + && true + diff --git a/doc/note/qemu/setup-jre8-env.txt b/doc/note/qemu/setup-jre8-env.txt new file mode 100644 index 0000000..f39cf98 --- /dev/null +++ b/doc/note/qemu/setup-jre8-env.txt @@ -0,0 +1,18 @@ + +# +# openjdk java 1.8 runtime environment. +# + +### For alpine 3.16.0 +true \ + && apk add openjdk8-jre \ + && true + +### For debian 9 +true \ + && SUDO= \ + && $SUDO apt update \ + && `# Use one of openjdk-8-jre-headless or openjdk-8-jre` \ + && $SUDO apt install -y --no-install-recommends openjdk-8-jre \ + && true + diff --git a/doc/note/qemu/setup-maven-env.txt b/doc/note/qemu/setup-maven-env.txt new file mode 100644 index 0000000..e4ce16d --- /dev/null +++ b/doc/note/qemu/setup-maven-env.txt @@ -0,0 +1,16 @@ +# +# Maven build env. 
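+#
+# HINT: Create the guest-side mount point before running the cifs mount
+# shown below, eg:
+#
+#   mkdir -p /var/tmp/.m2/repository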
+# +# Use this to share your hosts repository with the container: +# +# mount -t cifs //10.0.2.2/path/to/.m2/repository /var/tmp/.m2/repository +# +set -e + + +### Made for alpine 3.16.0 +true \ + && apk add maven \ + && sed -i "s,, /var/tmp/.m2/repository\n,g" /usr/share/java/maven-3/conf/settings.xml \ + && true + diff --git a/doc/note/qemu/setup-nginx-env.txt b/doc/note/qemu/setup-nginx-env.txt new file mode 100644 index 0000000..9a5aeb2 --- /dev/null +++ b/doc/note/qemu/setup-nginx-env.txt @@ -0,0 +1,48 @@ +# +# Bare nginx server serving HTTP/80 and HTTPS/443. +# +set -e + + +### Made for alpine 3.16.0 +true \ + && CN="example.com" \ + \ + && apk add nginx openssl \ + && mkdir /etc/ssl/private \ + && openssl genrsa -out /etc/ssl/private/nginx.key 2048 \ + && openssl req -new -key /etc/ssl/private/nginx.key \ + -out /etc/ssl/private/nginx.csr \ + -subj "/C=/ST=/L=/O=/OU=/CN=${CN:?}" \ + && openssl x509 -req -days 365 -in /etc/ssl/private/nginx.csr \ + -signkey /etc/ssl/private/nginx.key -out /etc/ssl/certs/nginx.crt \ + && chgrp nginx /etc/ssl/private/nginx.key \ + && chmod 0640 /etc/ssl/private/nginx.key \ + && printf '%s\n' \ + 'server {' \ + ' listen 80 default_server;' \ + ' listen [::]:80 default_server;' \ + ' listen 443 ssl default_server;' \ + ' listen [::]:443 default_server;' \ + ' ssl_certificate /etc/ssl/certs/nginx.crt;' \ + ' ssl_certificate_key /etc/ssl/private/nginx.key;' \ + ' location / {' \ + ' root /srv/www;' \ + ' index index.html index.htm;' \ + ' }' \ + '}' \ + > /etc/nginx/http.d/default.conf \ + && mkdir /srv/www \ + && printf '
<html><body>Nginx says hi</body></html>
\n' > /srv/www/index.html \ + && chown nginx:nginx /srv/www \ + && chown nginx:nginx /srv/www/index.html \ + && apk del openssl \ + && `# Configure nginx as a service` \ + && rc-update add nginx \ + && /etc/init.d/nginx start \ + && sleep 1 \ + && echo \ + && printf 'GET /index.html HTTP/1.0\r\n\r\n'|nc localhost 80 \ + && echo \ + && true + diff --git a/src/main/c/PcapOne/PcapOne.c b/src/main/c/PcapOne/PcapOne.c deleted file mode 100644 index 2eb9e25..0000000 --- a/src/main/c/PcapOne/PcapOne.c +++ /dev/null @@ -1,311 +0,0 @@ -/* TODO fix this bullshit */ -typedef unsigned u_int; -typedef unsigned short u_short; -typedef unsigned char u_char; -#include -/* endOf TODO */ - - -/* System */ -#include -#include -#include -#include -#include - -static char const*const DEV_STDIN = "/dev/stdin"; - -#define FLG_isHelp (1<<0) -#define FLG_isTcpPsh (1<<3) -#define FLG_isTcpRst (1<<4) -#define FLG_isTcpSyn (1<<5) -#define FLG_isTcpFin (1<<6) -#define FLG_isHttpReq (1<<7) -#define FLG_isLlLinux (1<<12) -#define FLG_isHdrPrinted (1<<13) -#define FLG_INIT (0) - -typedef struct PcapOne PcapOne; - - -struct PcapOne { - uint_least16_t flg; - const char *dumpFilePath; - char *pcapErrbuf; - pcap_t *pcap; - unsigned long frameNr; - struct/*most recent frame*/{ - int llProto; - int llHdrEnd; - }; - struct/*most recent packet*/{ - int netProto; - int netBodyLen; - int netHdrEnd; - int_fast32_t netTotLen; - uint_least32_t ipSrcAddr, ipDstAddr; - }; - struct/*most recent segment*/{ - int trspBodyLen; - int trspSrcPort, trspDstPort; - int trspHdrEnd; - }; - struct/*most recent http requst*/{ - const uint8_t *httpReqHeadline; - int httpReqHeadline_len; - int httpReq_off; /* pkg offset from begin of most recent request */ - }; -}; - - -/*BEG func fwd decl*/ -static void parse_ll_LINUX_SLL( PcapOne*, const struct pcap_pkthdr*, const u_char* ); -static void parse_net_IPv4( PcapOne*, const struct pcap_pkthdr*, const u_char* ); -static void parse_trsp_TCP( PcapOne*, const struct pcap_pkthdr*, const u_char* ); -static void parse_appl_HTTP_req( PcapOne*, const struct pcap_pkthdr*, const u_char* ); -static void printParsingResults( PcapOne*, const struct pcap_pkthdr* ); -/*END func fwd decl*/ - -static void printHelp(){ - #define STRQUOT_21a9ffbe344c0792ed88688d6c676359(s) #s - #define STRQUOT(s) STRQUOT_21a9ffbe344c0792ed88688d6c676359(s) - const char *basename = "/"__FILE__ + sizeof("/"__FILE__); - for(; basename[-1] != '/'; --basename ); - printf("%s%s%s", " \n" - " ", basename, " " STRQUOT(PROJECT_VERSION) "\n" - " \n" - " Options:\n" - " \n" - " --pcap-stdin\n" - " Like --pcap but reading from stdin.\n" - " \n" - " --pcap \n" - " Pcap file to operate on. 
Compressed files are NOT supported.\n" - " \n"); - #undef STRQUOT_21a9ffbe344c0792ed88688d6c676359 - #undef STRQUOT -} - - -static int parseArgs( PcapOne*app, int argc, char**argv ){ - app->flg = FLG_INIT; - app->dumpFilePath = NULL; - for( int iA = 1 ; iA < argc ; ++iA ){ - const char *arg = argv[iA]; - if(0){ - }else if( !strcmp(arg,"--help") ){ - app->flg |= FLG_isHelp; return 0; - }else if( !strcmp(arg,"--pcap") ){ - arg = argv[++iA]; - if( arg == NULL ){ fprintf(stderr, "EINVAL --pcap needs value\n"); return -1; } - app->dumpFilePath = arg; - }else if( !strcmp(arg,"--pcap-stdin") ){ - app->dumpFilePath = DEV_STDIN; - }else{ - fprintf(stderr, "EINVAL: %s\n", arg); return -1; - } - } - if( app->dumpFilePath == NULL ){ - fprintf(stderr, "EINVAL Arg missing: --pcap \n"); return -1; } - return 0; -} - - -static void onPcapPkg( u_char*user, const struct pcap_pkthdr*hdr, const u_char*buf ){ - PcapOne *const app = (void*)user; - - /* prepare for this new packet */ - app->frameNr += 1; - app->flg &= ~(FLG_isTcpPsh | FLG_isTcpRst | FLG_isTcpSyn | FLG_isTcpFin | FLG_isHttpReq); - - /* data-link layer */ - switch( pcap_datalink(app->pcap) ){ - case 0x71: parse_ll_LINUX_SLL(app, hdr, buf); break; - default: assert(!fprintf(stderr,"pcap_datalink() -> 0x%02X\n", pcap_datalink(app->pcap))); - } - - /* network layer */ - switch( app->llProto ){ - case 0x0800: parse_net_IPv4(app, hdr, buf); break; - default: printf("???, proto=0x%04X, network-layer\n", app->llProto); return; - } - - /* transport layer */ - switch( app->netProto ){ - case 0x06: parse_trsp_TCP(app, hdr, buf); break; - default: printf("???, proto=0x%02X, transport-layer\n", app->netProto); return; - } - - assert(app->trspBodyLen >= 0); - - /* application layer, towards server */ - switch( app->trspDstPort ){ - case 80: parse_appl_HTTP_req(app, hdr, buf); break; - case 7012: parse_appl_HTTP_req(app, hdr, buf); break; - case 8080: parse_appl_HTTP_req(app, hdr, buf); break; - } - - printParsingResults(app, hdr); -} - - -static void parse_ll_LINUX_SLL( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){ - assert(hdr->caplen >= 15); - app->llProto = buf[14]<<8 | buf[15]; - app->llHdrEnd = 16; -} - - -static void parse_net_IPv4( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){ - assert(hdr->caplen >= app->llHdrEnd+19 && "TODO_775afde7f19010220e9df8d5e2924c3e"); - int_fast8_t netHdrLen = (buf[app->llHdrEnd+0] & 0x0F) * 4; - app->netTotLen = buf[app->llHdrEnd+2] << 8 | buf[app->llHdrEnd+3]; - app->netProto = buf[app->llHdrEnd+9]; - app->ipSrcAddr = 0 - | ((uint_least32_t)buf[app->llHdrEnd+12]) << 24 - | ((uint_least32_t)buf[app->llHdrEnd+13]) << 16 - | buf[app->llHdrEnd+14] << 8 - | buf[app->llHdrEnd+15] ; - app->ipDstAddr = 0 - | ((uint_least32_t)buf[app->llHdrEnd+16]) << 24 - | ((uint_least32_t)buf[app->llHdrEnd+17]) << 16 - | buf[app->llHdrEnd+18] << 8 - | buf[app->llHdrEnd+19] ; - app->netHdrEnd = app->llHdrEnd + netHdrLen; - app->netBodyLen = app->netTotLen - netHdrLen; -} - - -static void parse_trsp_TCP( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){ - assert(hdr->caplen >= app->netHdrEnd+12 && "TODO_058d5f41043d383e1ba2c492d0db4b6a"); - app->trspSrcPort = buf[app->netHdrEnd+0] << 8 | buf[app->netHdrEnd+1]; - app->trspDstPort = buf[app->netHdrEnd+2] << 8 | buf[app->netHdrEnd+3]; - int tcpHdrLen = (buf[app->netHdrEnd+12] >> 4) * 4; - app->trspHdrEnd = app->netHdrEnd + tcpHdrLen; - app->trspBodyLen = app->netBodyLen - tcpHdrLen; -} - - -static void parse_appl_HTTP_req( PcapOne*app, const struct 
pcap_pkthdr*hdr, const u_char*buf ){ - app->flg |= FLG_isHttpReq; - app->httpReqHeadline = buf + app->trspHdrEnd; - app->httpReqHeadline_len = 0; - for(;; ++app->httpReqHeadline_len ){ - if( (app->trspHdrEnd + app->httpReqHeadline_len) > hdr->caplen ) break; - if( app->httpReqHeadline[app->httpReqHeadline_len] == '\r' ) break; - if( app->httpReqHeadline[app->httpReqHeadline_len] == '\n' ) break; - } - /* TODO improve, as now its like a guess only */ - int isNewRequest = 0 - | !memcmp(buf + app->trspHdrEnd, "GET ", 4) - | !memcmp(buf + app->trspHdrEnd, "PUT ", 4) - | !memcmp(buf + app->trspHdrEnd, "POST ", 5) - | !memcmp(buf + app->trspHdrEnd, "DELETE ", 7) - ; - if( isNewRequest ){ - app->httpReq_off = 0; - }else{ - app->httpReq_off = 42; /*TODO make more accurate*/ - } -} - - -static void printParsingResults( PcapOne*app, const struct pcap_pkthdr*hdr ){ - - int isHttpRequest = (app->flg & FLG_isHttpReq); - int isHttpReqBegin = isHttpRequest && app->httpReq_off == 0; - - if( isHttpRequest && isHttpReqBegin ){ - /* find http method */ - const uint8_t *method = app->httpReqHeadline; - int method_len = 0; - for(;; ++method_len ){ - if( method_len > app->httpReqHeadline_len ) break; - if( method[method_len] == ' ' ) break; - } - /* find http uri */ - const uint8_t *uri = method + method_len + 1; - int uri_len = 0; - for(;; ++uri_len ){ - if( method_len + uri_len > app->httpReqHeadline_len ) break; - if( uri[uri_len] == ' ' ) break; - } - if( !(app->flg & FLG_isHdrPrinted) ){ - app->flg |= FLG_isHdrPrinted; - printf("h;Title;HTTP requests\n"); - printf("c;epochSec;srcIp;dstIp;srcPort;dstPort;http_method;http_uri\n"); - } - /* print it as a quick-n-dirty CSV record */ - printf("r;%ld.%06ld;%d.%d.%d.%d;%d.%d.%d.%d;%d;%d;%.*s;%.*s\n", - hdr->ts.tv_sec, hdr->ts.tv_usec, - app->ipSrcAddr >> 24, app->ipSrcAddr >> 16 & 0xFF, app->ipSrcAddr >> 8 & 0xFF, app->ipSrcAddr & 0xFF, - app->ipDstAddr >> 24, app->ipDstAddr >> 16 & 0xFF, app->ipDstAddr >> 8 & 0xFF, app->ipDstAddr & 0xFF, - app->trspSrcPort, app->trspDstPort, - method_len, method, uri_len, uri); - } -} - - -static int run( PcapOne*app ){ - int err; - err = pcap_init(PCAP_CHAR_ENC_UTF_8, app->pcapErrbuf); - if( err == PCAP_ERROR ){ - fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; } - app->pcap = pcap_open_offline( - (app->dumpFilePath == DEV_STDIN) ? 
"-" : app->dumpFilePath, - app->pcapErrbuf); - if( app->pcap == NULL ){ - fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; } - for(;;){ - err = pcap_dispatch(app->pcap, -1, onPcapPkg, (void*)app); - switch( err ){ - case PCAP_ERROR: - fprintf(stderr, "pcap_dispatch(): %s\n", pcap_geterr(app->pcap)); - err = -1; goto endFn; - case PCAP_ERROR_BREAK: - case PCAP_ERROR_NOT_ACTIVATED: - fprintf(stderr, "pcap_dispatch() -> %d\n", err); - err = -1; goto endFn; - } - if( err > 0 ){ - fprintf(stderr, "Processed %d packages in this turn.\n", err); - continue; - } - break; - } - err = 0; -endFn: - if( app->pcap != NULL ){ pcap_close(app->pcap); app->pcap = NULL; } - return err; -} - - -int main( int argc, char**argv ){ - int err; - static char errbuf[PCAP_ERRBUF_SIZE]; - errbuf[0] = '\0'; - PcapOne app = { - .flg = FLG_INIT, - .pcapErrbuf = errbuf, - .pcap = NULL, - .frameNr = 0, - .trspBodyLen = 0, - }; - #define app (&app) - - err = parseArgs(app, argc, argv); - if( err ){ goto endFn; } - - if( app->flg & FLG_isHelp ){ - printHelp(); goto endFn; } - - err = run(app); - -endFn: - if( err < 0 ) err = -err; - if( err > 0x7F ) err = 1; - return err; - #undef app -} - - diff --git a/src/main/docker/android-dev.Dockerfile b/src/main/docker/android-dev.Dockerfile deleted file mode 100644 index 3f7b4b4..0000000 --- a/src/main/docker/android-dev.Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -# -# Tools for Android development. -# - -ARG PARENT_IMAGE=debian:buster-20220622-slim -FROM $PARENT_IMAGE - -ARG PKGS_TO_ADD="curl unzip openjdk-11-jdk-headless aapt apksigner zipalign" -ARG PKGS_TO_DEL="curl unzip" -ARG PKGINIT="apt-get update" -ARG PKGADD="apt-get install -y --no-install-recommends" -ARG PKGDEL="apt-get purge -y" -ARG PKGCLEAN="apt-get clean" -ARG PLATFORM_VERSION="22" -ARG BUILD_TOOLS_VERSION="22.0.1" -ARG CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip" - -ENV ANDROID_HOME="/usr/lib/android-sdk" -ENV PATH="$PATH:/usr/lib/android-sdk/build-tools/debian:/usr/lib/android-sdk/cmdline-tools/latest/bin:/usr/lib/android-sdk/build-tools/$BUILD_TOOLS_VERSION" - -WORKDIR /work - -RUN true \ - && $PKGINIT \ - && $PKGADD $PKGS_TO_ADD \ - && (cd /tmp && curl -sSLO "$CMDLINETOOLS_URL") \ - && if test -x /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \ - && (cd /tmp && unzip $(basename "$CMDLINETOOLS_URL") >/dev/null) \ - && mkdir /usr/lib/android-sdk/cmdline-tools \ - && mkdir /usr/lib/android-sdk/cmdline-tools/latest \ - && mv /tmp/cmdline-tools/* /usr/lib/android-sdk/cmdline-tools/latest/. \ - && yes | sdkmanager --install "platforms;android-$PLATFORM_VERSION" "build-tools;$BUILD_TOOLS_VERSION" \ - # Those for some reason are broken (wrong linker) so use the debian variant. 
- && (cd "/usr/lib/android-sdk/build-tools/${BUILD_TOOLS_VERSION:?}" && rm aapt zipalign) \ - && chown 1000:1000 /work \ - && $PKGDEL $PKGS_TO_DEL \ - && $PKGCLEAN \ - && rm -rf /tmp/* \ - && true - -USER 1000:1000 - -CMD ["sleep", "36000"] - diff --git a/src/main/docker/gcc-windoof.Dockerfile b/src/main/docker/gcc-windoof.Dockerfile deleted file mode 100644 index 69cc18e..0000000 --- a/src/main/docker/gcc-windoof.Dockerfile +++ /dev/null @@ -1,233 +0,0 @@ -# -# Windoof GCC build env -# - -ARG BASE_IMG=alpine:3.16.0 -FROM $BASE_IMG - -ARG PKGSTOADD="ca-certificates curl mingw-w64-gcc make tar" -ARG PKGSTODEL="ca-certificates curl" -ARG PKGADD="apk add" -ARG PKGDEL="apk del" -ARG PKGCLEAN="true" -ARG PKGINIT="true" -ARG VERSION_CJSON="1.7.15" -ARG VERSION_EXPAT="2.4.2" -ARG VERSION_LUA="5.4.3" -ARG VERSION_MBEDTLS="3.1.0" -ARG VERSION_SDL2="2.0.20" -ARG VERSION_SQLITE="3.33.0" -ARG VERSION_ZLIB="1.2.11" - -ENV NDEBUG=1 MAKE_JOBS=8 HOST=x86_64-w64-mingw32 - -RUN true \ - && $PKGINIT && $PKGADD $PKGSTOADD \ - # - && ensureSourceIsCached () { \ - local localPath=${1:?}; \ - local url=${2:?}; \ - if test -f "${localPath:?}"; then \ - echo "[DEBUG] Source avail as \"${localPath:?}\""; \ - return; \ - fi; \ - echo "[DEBUG] Downloading \"${localPath:?}\""; \ - echo "[DEBUG] from \"${url:?}\""; \ - curl -L "$url" -o "${localPath:?}"; \ - } \ - # - && makeZlib () { echo "\n Build zlib\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir "/tmp/zlib" && cd "/tmp/zlib" \ - && tar xzf "${tarbal:?}" \ - && cd zlib-* \ - && mkdir build \ - && export DESTDIR=./build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \ - && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \ - && make -e -j$MAKE_JOBS -fwin32/Makefile.gcc PREFIX="${HOST:?}"- \ - && make -e -fwin32/Makefile.gcc install PREFIX="${HOST:?}"- \ - && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \ - && cp README build/. \ - && (cd build && rm -rf lib/pkgconfig) \ - && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-windoof.tgz" \ - && cd / && rm -rf "/tmp/zlib" \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f "/tmp/zlib-${version:?}-windoof.tgz" -x include lib \ - && echo -e "\n zlib Done :)\n" \ - && cd "${origDir:?}" ; } \ - && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \ - && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \ - # - && $PKGADD xz \ - && makeExpat () { echo -e "\n Build Expat\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/expat && cd /tmp/expat \ - && tar xf "${tarbal:?}" --strip-components=1 \ - && mkdir build \ - && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" CFLAGS="-Wall -pedantic --std=c99 -O2" \ - && make -e clean \ - && make -e -j$MAKE_JOBS \ - && make -e install \ - && cp README.md build/. 
\ - && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \ - && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/expat \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/expat-2.4.2-debian.tgz -x bin include lib \ - && echo -e "\n Expat Done :)\n" ; } \ - && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \ - && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \ - # - && makeCJSON () { echo -e "\n Build cJSON\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/cJSON && cd /tmp/cJSON \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir build build/obj build/lib build/include \ - && CC="${HOST:?}-gcc" \ - && AR="${HOST:?}-ar" \ - && CFLAGS="-Wall -pedantic -fPIC" \ - && ${CC:?} $CFLAGS -c -o build/obj/cJSON.o cJSON.c \ - && ${CC:?} $CFLAGS -shared -o build/lib/libcJSON.so.1.7.15 build/obj/cJSON.o \ - && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \ - && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \ - && ${AR:?} rcs build/lib/libcJSON.a build/obj/cJSON.o \ - && unset CC AR CFLAGS \ - && cp -t build/. LICENSE README.md \ - && cp -t build/include/. cJSON.h \ - && rm -rf build/obj \ - && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \ - && cd / && rm -rf /tmp/cJSON \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \ - && echo -e "\n cJSON Done :)\n"; } \ - && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \ - && makeCJSON "${VERSION_CJSON:?}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \ - # - && $PKGADD python3 \ - && makeMbedtls () { echo -e "\n Build mbedtls\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/mbedtls && cd /tmp/mbedtls \ - && tar xf "${tarbal:?}" \ - && cd * \ - && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \ - # Yet another hack around as gethostname seems not to exist and I do - # not understand how to disable compiling those "programs" which I - # do not want anyway. 
- && rm programs/ssl/ssl_mail_client.c programs/test/udp_proxy.c \ - && sed -i '/^\t\+\(ssl\/ssl_mail_client\|test\/udp_proxy\) \+\\$/d' programs/Makefile \ - && sed -i '/^ \+ssl_mail_client$/d' programs/ssl/CMakeLists.txt \ - && export CC="${HOST:?}-gcc" AR="${HOST:?}-ar" WINDOWS_BUILD=1 SHARED=1 \ - && make -e -j$MAKE_JOBS no_test \ - && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \ - && make -e install \ - && unset CC AR WINDOWS_BUILD SHARED \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-windoof.tgz" \ - && cd / && rm -rf /tmp/mbedtls \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/mbedtls-${version:?}-windoof.tgz -x bin include lib \ - && cd "${origDir:?}" \ - && echo -e "\n mbedtls Done :)\n" ; } \ - && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \ - && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \ - # - && makeSqLite () { echo -e "\n Build SqLite\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/sqlite && cd /tmp/sqlite \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir build \ - && $PKGADD gcc musl-dev tcl \ - && export CC="${HOST}-gcc" CPP="${HOST:?}-cpp" CXX="${HOST:?}-g++" BCC=gcc \ - && ./configure --prefix="${PWD:?}/build" --host=$HOST CC=$CC CPP=$CPP CXX=$CXX BCC=gcc BEXE=.exe config_TARGET_EXEEXT=.exe \ - && ln -s mksourceid.exe mksourceid \ - && make -e clean \ - && make -e -j$MAKE_JOBS \ - && $PKGDEL gcc musl-dev tcl \ - && make -e install \ - && unset CC CPP CXX BCC \ - && (cd build && rm -rf lemon* mksourceid lib/pkgconfig lib/*.la) \ - && cp README.md LICENSE.md VERSION build/. \ - && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-3.33.0-windoof.tgz" \ - && cd / && rm -rf /tmp/sqlite \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/sqlite-${version:?}-windoof.tgz -x bin include lib \ - && cd "$origDir" \ - && echo -e "\n SqLite Done :)\n"; } \ - && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \ - && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \ - # - && $PKGADD binutils \ - && makeLua () { echo -e "\n Build Lua\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/lua && cd /tmp/lua \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir -p build/bin build/include build/lib build/man/man1 \ - && make -e -j$MAKE_JOBS PLAT=mingw CC="${HOST:?}-gcc -std=gnu99" "AR=${HOST:?}-ar rcu" "RANLIB=${HOST:?}-ranlib" \ - && cp -t build/. README \ - && cp -t build/bin/. src/lua.exe src/luac.exe \ - && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \ - && cp -t build/lib/. src/liblua.a \ - && cp -t build/man/man1/. 
doc/lua.1 doc/luac.1 \ - && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-windoof.tgz" \ - && cd / && rm -rf /tmp/lua \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/lua-${version:?}-windoof.tgz -x bin include lib man \ - && cd "$origDir" \ - && echo -e "\n Lua Done :)\n"; } \ - && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \ - && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \ - # - && $PKGADD alsa-lib libxext-dev pulseaudio-dev \ - && makeSDL2 () { echo -e "\n Build SDL2\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/SDL2 && cd /tmp/SDL2 \ - && tar xf "${tarbal:?}" \ - && cd * \ - && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" \ - && make -e -j$MAKE_JOBS \ - && make -e install \ - && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \ - && (cd build \ - && ls -A \ - | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|RADME.md|bin|lib|include)$' \ - | xargs rm -rf) \ - && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \ - && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-windoof.tgz" \ - && cd / && rm -rf /tmp/SDL2 \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/SDL2-${version:?}-windoof.tgz -x include lib \ - && cd "$origDir" \ - && echo -e "\n SDL2 Done :)\n"; } \ - && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \ - && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \ - # - && $PKGDEL $PKGSTODEL && $PKGCLEAN \ - && true - -WORKDIR /work - -CMD sleep 999999999 - - diff --git a/src/main/docker/gcc.Dockerfile b/src/main/docker/gcc.Dockerfile deleted file mode 100644 index 5894667..0000000 --- a/src/main/docker/gcc.Dockerfile +++ /dev/null @@ -1,220 +0,0 @@ -# -# Debian GCC build env -# - -ARG BASE_IMG=debian:9-slim -FROM $BASE_IMG - -ARG PKGSTOADD="ca-certificates curl gcc make tar" -ARG PKGSTODEL="ca-certificates curl" -ARG PKGADD="apt-get install -y --no-install-recommends" -ARG PKGDEL="apt-get purge -y" -ARG PKGCLEAN="apt-get clean" -ARG PKGINIT="apt-get update" -ARG VERSION_CJSON="1.7.15" -ARG VERSION_EXPAT="2.4.2" -ARG VERSION_LUA="5.4.3" -ARG VERSION_MBEDTLS="3.1.0" -ARG VERSION_SDL2="2.0.20" -ARG VERSION_SQLITE="3.33.0" -ARG VERSION_ZLIB="1.2.11" - -ENV NDEBUG=1 MAKE_JOBS=8 - -RUN true \ - && $PKGINIT && $PKGADD $PKGSTOADD \ - # - && ensureSourceIsCached () { \ - local localPath=${1:?}; \ - local url=${2:?}; \ - if test -f "${localPath:?}"; then \ - echo "[DEBUG] Source avail as \"${localPath:?}\""; \ - return; \ - fi; \ - echo "[DEBUG] Downloading \"${localPath:?}\""; \ - echo "[DEBUG] from \"${url:?}\""; \ - curl -L "$url" -o "${localPath:?}"; \ - } \ - # - && $PKGADD libc-dev \ - && makeZlib () { echo "\n Build zlib\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir "/tmp/zlib" && cd "/tmp/zlib" \ - && tar xzf "${tarbal:?}" \ - && cd zlib-* \ - && mkdir build \ - && ./configure --prefix="${PWD:?}/build/" \ - && make -e -j$MAKE_JOBS \ - && make install \ - && cp README build/. 
\ - && (cd build \ - && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-debian.tgz" \ - && cd / && rm -rf "/tmp/zlib" \ - && mkdir -p /usr/local/ \ - && tar -C /usr/local -f "/tmp/zlib-${version:?}-debian.tgz" -x include lib \ - && cd "${origDir:?}" \ - && echo -e "\n zlib Done :)\n" ; } \ - && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \ - && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \ - # - && $PKGADD libc-dev xz-utils \ - && makeExpat () { echo -e "\n Build Expat\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/expat && cd /tmp/expat \ - && tar xf "${tarbal:?}" --strip-components=1 \ - && mkdir build \ - && ./configure --prefix="${PWD:?}/build" CFLAGS='-Wall -pedantic --std=c99 -O2' \ - && make -e clean \ - && make -e -j$MAKE_JOBS \ - && make -e install \ - && cp README.md build/. \ - && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \ - && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/expat \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/expat-2.4.2-debian.tgz -x bin include lib \ - && cd "$origDir" \ - && echo -e "\n Expat Done :)\n" ; } \ - && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \ - && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \ - # - && $PKGADD libc-dev \ - && makeCJSON () { echo -e "\n Build cJSON\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/cJSON && cd /tmp/cJSON \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir build build/obj build/lib build/include \ - && CFLAGS="-Wall -pedantic -fPIC" \ - && gcc $CFLAGS -c -o build/obj/cJSON.o cJSON.c \ - && gcc $CFLAGS -shared -o build/lib/libcJSON.so.1.7.15 build/obj/cJSON.o \ - && unset CFLAGS \ - && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \ - && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \ - && ar rcs build/lib/libcJSON.a build/obj/cJSON.o \ - && cp -t build/. LICENSE README.md \ - && cp -t build/include/. 
cJSON.h \ - && rm -rf build/obj \ - && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \ - && cd / && rm -rf /tmp/cJSON \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \ - && cd "$origDir" \ - && echo -e "\n cJSON Done :)\n"; } \ - && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \ - && makeCJSON "${VERSION_CJSON}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \ - # - && $PKGADD libc-dev python3 \ - && makeMbedtls () { echo -e "\n Build mbedtls\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/mbedtls && cd /tmp/mbedtls \ - && tar xf "${tarbal:?}" \ - && cd * \ - && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \ - && SHARED=1 make -e -j$MAKE_JOBS tests lib mbedtls_test \ - && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \ - && make -e install \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/mbedtls \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/mbedtls-${version:?}-debian.tgz -x bin include lib \ - && cd "$origDir" \ - && echo -e "\n mbedtls Done :)\n"; } \ - && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \ - && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \ - # - && $PKGADD libc-dev tcl \ - && makeSqLite () { echo -e "\n Build SqLite\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/sqlite && cd /tmp/sqlite \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir build \ - && ./configure --prefix="${PWD:?}/build" \ - && make -e clean \ - && make -e -j$MAKE_JOBS \ - && make -e install \ - && cp README.md LICENSE.md VERSION build/. \ - && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/sqlite \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/sqlite-${version:?}-debian.tgz -x bin include lib \ - && cd "$origDir" \ - && echo -e "\n SqLite Done :)\n"; } \ - && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \ - && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \ - # - && $PKGADD libc-dev \ - && makeLua () { echo -e "\n Build Lua\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/lua && cd /tmp/lua \ - && tar xf "${tarbal:?}" \ - && cd * \ - && mkdir -p build/bin build/include build/lib build/man/man1 \ - && make -e -j$MAKE_JOBS \ - && cp -t build/. README \ - && cp -t build/bin/. src/lua src/luac \ - && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \ - && cp -t build/lib/. src/liblua.a \ - && cp -t build/man/man1/. 
doc/lua.1 doc/luac.1 \ - && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/lua \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/lua-${version:?}-debian.tgz -x bin include lib man \ - && cd "$origDir" \ - && echo -e "\n Lua Done :)\n"; } \ - && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \ - && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \ - # - && $PKGADD libc-dev libasound2-dev libxext-dev libpulse-dev \ - && makeSDL2 () { echo -e "\n Build SDL2\n" \ - && local version="${1:?}" \ - && local tarbal="${2:?}" \ - && local origDir="${PWD:?}" \ - && mkdir /tmp/SDL2 && cd /tmp/SDL2 \ - && tar xf "${tarbal:?}" \ - && cd * \ - && ./configure --prefix="${PWD:?}/build" --host= \ - && make -e -j$MAKE_JOBS \ - && make -e install \ - && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \ - && (cd build \ - && ls -A \ - | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|RADME.md|bin|lib|include)$' \ - | xargs rm -rf) \ - && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \ - && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \ - && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-debian.tgz" \ - && cd / && rm -rf /tmp/SDL2 \ - && mkdir -p /usr/local \ - && tar -C /usr/local -f /tmp/SDL2-${version:?}-debian.tgz -x include lib \ - && cd "$origDir" \ - && echo -e "\n SDL2 Done :)\n"; } \ - && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \ - && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \ - # - && $PKGDEL $PKGSTODEL && $PKGCLEAN \ - && true - -WORKDIR /work - -CMD sleep 999999999 - - diff --git a/src/main/docker/gxx.Dockerfile b/src/main/docker/gxx.Dockerfile deleted file mode 100644 index f29f168..0000000 --- a/src/main/docker/gxx.Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -# -# Debian with C++ compiler. -# - -ARG PARENT_IMAGE=debian:buster-20220622-slim -FROM $PARENT_IMAGE - -RUN true \ - && apt update \ - && apt install -y --no-install-recommends \ - g++ make \ - && apt clean \ - && true - -USER 1000:1000 -WORKDIR /work -CMD ["sleep", "36000"] diff --git a/src/main/docker/jni.Dockerfile b/src/main/docker/jni.Dockerfile deleted file mode 100644 index c790e47..0000000 --- a/src/main/docker/jni.Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# -# Debian with tools for java-native-interface development. -# - -ARG PARENT_IMAGE=debian:buster-20220622-slim -FROM $PARENT_IMAGE - -ENV \ - JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64 - -RUN true \ - && apt update \ - && apt install -y --no-install-recommends \ - g++ make openjdk-11-jdk-headless \ - && apt clean \ - && true - -USER 1000:1000 -WORKDIR /work -CMD ["sleep", "36000"] diff --git a/src/main/docker/jre8.Dockerfile b/src/main/docker/jre8.Dockerfile deleted file mode 100644 index 603b5f5..0000000 --- a/src/main/docker/jre8.Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# openjdk java 1.8 runtime environment. 
-# - -ARG PARENT_IMAGE=alpine:3.16.0 -FROM $PARENT_IMAGE - -ARG PKGS_TO_ADD="openjdk8-jre" -ARG PKGS_TO_DEL="" -ARG PKGINIT="true" -ARG PKGADD="apk add" -ARG PKGDEL="true" -ARG PKGCLEAN="true" - -WORKDIR /work - -RUN true \ - && $PKGINIT \ - && $PKGADD $PKGS_TO_ADD \ - && $PKGDEL $PKGS_TO_DEL \ - && $PKGCLEAN \ - && true - -USER 1000:1000 - -CMD ["sleep", "36000"] - diff --git a/src/main/docker/maven.Dockerfile b/src/main/docker/maven.Dockerfile deleted file mode 100644 index c33d519..0000000 --- a/src/main/docker/maven.Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -# -# Maven build env. -# -# Use this to share your hosts repository with the container: -# -# -v "$HOME/.m2/repository:/data/maven/.m2/repository" -# - -ARG PARENT_IMAGE=alpine:3.16.0 -FROM $PARENT_IMAGE - -ARG PKGS_TO_ADD="maven" -ARG PKGS_TO_DEL="" -ARG PKGINIT="true" -ARG PKGADD="apk add" -ARG PKGDEL="true" -ARG PKGCLEAN="true" - -WORKDIR /work - -RUN true \ - && $PKGINIT \ - && $PKGADD $PKGS_TO_ADD \ - && sed -i "s,, /data/maven/.m2/repository\n,g" /usr/share/java/maven-3/conf/settings.xml \ - && mkdir /data /data/maven \ - && chown 1000:1000 /data/maven \ - && chown 1000:1000 /work \ - && $PKGDEL $PKGS_TO_DEL \ - && $PKGCLEAN \ - && true - -USER 1000:1000 - -CMD ["sleep", "36000"] - diff --git a/src/main/docker/nginx.Dockerfile b/src/main/docker/nginx.Dockerfile deleted file mode 100644 index 097d283..0000000 --- a/src/main/docker/nginx.Dockerfile +++ /dev/null @@ -1,50 +0,0 @@ -# -# Bare nginx server serving HTTP/80 and HTTPS/443 from "/work/www". -# - -ARG PARENT_IMAGE=alpine:3.16.0 -FROM $PARENT_IMAGE - -ARG CN=example.com -ARG PKGS_TO_ADD="nginx openssl" -ARG PKGS_TO_DEL="openssl" -ARG PKGINIT="true" -ARG PKGADD="apk add" -ARG PKGDEL="true" -ARG PKGCLEAN="true" - -WORKDIR /work - -RUN true \ - && $PKGINIT \ - && $PKGADD $PKGS_TO_ADD \ - && mkdir /work/www \ - && openssl genrsa -out /etc/ssl/private/nginx.key 2048 \ - && openssl req -new -key /etc/ssl/private/nginx.key \ - -out /etc/ssl/private/nginx.csr \ - -subj "/C=/ST=/L=/O=/OU=/CN=${CN:?}" \ - && openssl x509 -req -days 365 -in /etc/ssl/private/nginx.csr \ - -signkey /etc/ssl/private/nginx.key -out /etc/ssl/certs/nginx.crt \ - && chgrp nginx /etc/ssl/private/nginx.key \ - && chmod 0640 /etc/ssl/private/nginx.key \ - && printf 'server {\n\ - listen 80 default_server;\n\ - listen [::]:80 default_server;\n\ - listen 443 ssl default_server;\n\ - listen [::]:443 default_server;\n\ - ssl_certificate /etc/ssl/certs/nginx.crt;\n\ - ssl_certificate_key /etc/ssl/private/nginx.key;\n\ - location / {\n\ - root /work/www;\n\ - index index.html index.htm;\n\ - }\n\ -}\n' > /etc/nginx/http.d/default.conf \ - && chown nginx:nginx /work /work/www \ - && $PKGDEL $PKGS_TO_DEL \ - && $PKGCLEAN \ - && true - -USER nginx:nginx - -CMD ["nginx", "-g", "daemon off;"] - diff --git a/src/main/docker/zlib-deb.Dockerfile b/src/main/docker/zlib-deb.Dockerfile deleted file mode 100644 index c5abaf6..0000000 --- a/src/main/docker/zlib-deb.Dockerfile +++ /dev/null @@ -1,49 +0,0 @@ -# -# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-deb.Dockerfile" | docker build -f- . 
-t "zlib-deb:$(date +%Y%m%d)" -# -ARG PARENT_IMAGE=debian:9-slim -FROM $PARENT_IMAGE - -ARG ZLIB_VERSION="1.2.11" -ARG PKGS_TO_ADD="curl gcc make tar libc-dev ca-certificates vim" -ARG PKGS_TO_DEL="" -ARG PKG_INIT="apt-get update" -ARG PKG_ADD="apt-get install -y --no-install-recommends" -ARG PKG_DEL="apt-get purge" -ARG PKG_CLEAN="apt-get clean" - -RUN true \ - && WORKDIR="/work" \ - && THEOLDPWD="$PWD" \ - # Prepare System - && $PKG_INIT \ - && $PKG_ADD $PKGS_TO_ADD \ - # Prepare zlib - && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \ - && mkdir tarballs tree build \ - && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \ - && cd "${WORKDIR:?}/tree" \ - && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \ - # Make zlib - && ./configure --prefix="${WORKDIR:?}/build" \ - && make -e \ - && make install \ - && cp README "${WORKDIR}/build/." \ - && cd "${WORKDIR}/build" \ - && rm -rf lib/pkgconfig \ - && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \ - && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" \ - && cd "${WORKDIR}" \ - && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \ - # install zlib - && mkdir -p /usr/local/ \ - && tar -C /usr/local -f "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" -x include lib \ - # cleanup - && cd "${THEOLDPWD:?}" \ - && unset THEOLDPWD WORKDIR \ - && $PKG_DEL $PKGS_TO_DEL \ - && $PKG_CLEAN \ - && true - -WORKDIR /work - diff --git a/src/main/docker/zlib-mingw.Dockerfile b/src/main/docker/zlib-mingw.Dockerfile deleted file mode 100644 index abaa241..0000000 --- a/src/main/docker/zlib-mingw.Dockerfile +++ /dev/null @@ -1,51 +0,0 @@ -# -# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-mingw.Dockerfile" | docker build -f- . -t "zlib-deb:$(date +%Y%m%d)" -# -ARG PARENT_IMAGE=alpine:3.16.0 -FROM $PARENT_IMAGE - -ARG ZLIB_VERSION="1.2.11" -ARG PKGS_TO_ADD="curl mingw-w64-gcc make tar ca-certificates" -ARG PKGS_TO_DEL="" -ARG PKG_INIT="true" -ARG PKG_ADD="apk add " -ARG PKG_DEL="apk del" -ARG PKG_CLEAN="true" - -RUN true \ - && WORKDIR="/work" \ - && THEOLDPWD="$PWD" \ - # Prepare System - && $PKG_INIT \ - && $PKG_ADD $PKGS_TO_ADD \ - # Prepare zlib - && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \ - && mkdir tarballs tree build \ - && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \ - && cd "${WORKDIR:?}/tree" \ - && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \ - # Make zlib - && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \ - && export DESTDIR=../build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \ - && make -e -fwin32/Makefile.gcc PREFIX=x86_64-w64-mingw32- \ - && make -e -fwin32/Makefile.gcc install PREFIX=x86_64-w64-mingw32- \ - && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \ - && cp README ../build/. 
\ - && cd "${WORKDIR:?}/build" \ - && rm -rf lib/pkgconfig \ - && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \ - && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-1.2.11-windoof.tgz" \ - && cd "${WORKDIR:?}" \ - && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \ - # Install zlib - && mkdir -p /usr/local/x86_64-w64-mingw32 \ - && tar -C /usr/x86_64-w64-mingw32 -f "${WORKDIR:?}/tarballs/zlib-1.2.11-windoof.tgz" -x include lib \ - && cd "${THEOLDPWD:?}" \ - && unset THEOLDPWD WORKDIR \ - && $PKG_DEL $PKGS_TO_DEL \ - && $PKG_CLEAN \ - && true - -WORKDIR /work - - -- cgit v1.1 From 0a93de07d0341378e0844bcc969b0c876e7b6524 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 7 Nov 2023 16:27:48 +0100 Subject: Notes from OOM analysis in a JVM --- doc/note/openshift/dbg-mem-issues.txt | 23 ++- src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua | 235 +++++++++++++++++++++++++ 2 files changed, 255 insertions(+), 3 deletions(-) create mode 100644 src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index c7359fd..47bb9a5 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -4,8 +4,9 @@ How to hunt memory issues in production true \ && SVCNAME=foo-prod \ - && PID=9 \ - && OC="oc -n foo" \ + && PID=42 \ + && OC= \ + && JMX= \ && MemLeakTry1="lua -W MemLeakTry1.lua" \ && true @@ -16,7 +17,7 @@ ${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- sh -c 'true && printf '\''%s\n'\'' "$(sed '\''s;^(.*)$;FOO;'\'' /proc/'${PID:?}'/smaps)" \ ' -true \ +true `# Track pod memory` \ && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \ && printf '\''h;PageSize;%s\n'\'' $(getconf PAGESIZE) \ && printf '\''c;%-24s;%8s;%8s;%8s;%5s;%4s;%3s;%8s;%3s;%7s\n'\'' When nThrds size RSS SHR text lib data dt nFds \ @@ -47,3 +48,19 @@ true \ && true +true `# log JMX stuff` \ + && grepUsed () { egrep 'used : ' | sed -r 's_^[^0-9]+ ([0-9]+) [^0-9]+$_\1_'; } \ + && grepPureNumberLine () { egrep $(printf '^[0-9]+\r?$') | sed -r 's_^(.*)\r$_\1_'; } \ + && (true \ + && printf 'c; When ; JvmMetaspace; jvmNonHeap; JvmClassCnt; JvmHeap\n' \ + && while true; do true \ + && metaSpcByts="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/name=Metaspace,type=MemoryPool/attributes/Usage/' | grepUsed)" \ + && jvmNonHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/NonHeapMemoryUsage/' | grepUsed)" \ + && ldClassCnt="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=ClassLoading/attributes/LoadedClassCount/' | grepPureNumberLine)" \ + && jvmHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/HeapMemoryUsage/' | grepUsed)" \ + && printf 'r;%s;%13d;%11d;%12d;%9d\n' "$(date -Is)" "${metaSpcByts:?}" "${jvmNonHeap:?}" "${ldClassCnt:?}" "${jvmHeap:?}" \ + && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break \ + ; done) \ + | tee "houston-prod-jmx-Metaspace-used-$(date +%Y%m%d-%H%M%S).log" \ + && true + diff --git a/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua new file mode 100644 index 0000000..b17c00f --- /dev/null +++ b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua @@ -0,0 +1,235 @@ + +local inn, out, log = io.stdin, io.stdout, io.stderr +local main, parseArgs, printHelp, run, runAsPipe, runWithStdinFilelist + + +function printHelp() + io.stdout:write(" \n" + .." 
Try to get some useful data out of a 'smap' dump.\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --yolo\n"
+ .." WARN: Only use if you know what you are doing.\n"
+ .." \n"
+ .." --stdin-filelist\n"
+ .." Read LF separated file list from stdin.\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ if #_ENV.arg == 0 then log:write("EINVAL: Try --help\n") return end
+ app.isHelp = false
+ local isYolo = false
+ local iA = 0
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ elseif arg == "--yolo" then
+ isYolo = true
+ elseif arg == "--date" then
+ iA = iA + 1
+ app.dateStr = _ENV.arg[iA]
+ if not app.dateStr then log:write("EINVAL: --date needs value\n") return end
+ elseif arg == "--stdin-filelist" then
+ app.isStdinFilelist = true
+ else
+ log:write("EINVAL: ".. arg .."\n") return
+ end
+ end
+ return 0
+end
+
+
+function runAsPipe( app )
+ local iLine = 0
+ if #app.whitelist > 0 then
+ log:write("[INFO ] Filtering enabled\n")
+ end
+ local isHdrWritten = false
+ while true do
+ iLine = iLine + 1
+ local buf = inn:read("l")
+ if iLine == 1 then goto nextLine end
+ --log:write("BUF: ".. buf .."\n")
+ local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) ([^ ]+) +(.*)$")
+ if not sz and buf:find("^ +total +%d+[KMGTPE]$") then break end
+ if not sz then log:write("BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end
+ if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end
+ if #app.whitelist > 0 then
+ if not app.whitelist[addr] then goto nextLine end
+ end
+ if not isHdrWritten then
+ isHdrWritten = true
+ out:write("c; Addr ; Size ; Perm ; Note ; arg.date\n")
+ end
+ out:write(string.format("r; %s ; %12d ; %s ; %-12s ; %s\n", addr, sz, perm, note, (app.dateStr or"")))
+ ::nextLine::
+ end
+end
+
+
+function debugPrintRecursive( out, obj, prefix, isSubCall )
+ local typ = type(obj)
+ if false then
+ elseif typ == "string" then
+ out:write("\"") out:write((obj:gsub("\n", "\\n"):gsub("\r", "\\r"))) out:write("\"")
+ elseif typ == "number" then
+ out:write(obj)
+ elseif typ == "nil" then
+ out:write("nil")
+ elseif typ == "table" then
+ local subPrefix = (prefix)and(prefix.." ")or(" ")
+ for k, v in pairs(obj) do
+ out:write("\n") out:write(prefix or "")
+ debugPrintRecursive(out, k, prefix, true) out:write(": ")
+ debugPrintRecursive(out, v, subPrefix, true)
+ end
+ else
+ error(tostring(typ))
+ end
+ if not isSubCall then out:write("\n")end
+end
+
+
+function runWithStdinFilelist( app )
+ while true do
+ local srcFilePath = inn:read("l")
+ if not srcFilePath then break end
+ --log:write("[DEBUG] src file \"".. srcFilePath .."\"\n")
+ local srcFile = io.open(srcFilePath, "rb")
+ if not srcFile then error("fopen(\""..tostring(srcFilePath).."\")") end
+ collectData(app, srcFile, srcFilePath)
+ end
+ removeUnchanged(app)
+ printResult(app)
+end
+
+
+function collectData( app, src, timestamp )
+ assert(src)
+ assert(timestamp)
+ local iLine = 0
+ while true do
+ iLine = iLine + 1
+ local buf = src:read("l")
+ if iLine == 1 then goto nextLine end
+ local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) 
([^ ]+) +(.*)$") + if not sz and buf:find("^ +total +%d+[A-Za-z]?\r?$") then break end + if not sz then log:write("[ERROR] BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end + if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end + local addrObj = app.addrs[addr] + if not addrObj then + addrObj = { measures = {} } + app.addrs[addr] = addrObj + end + local measure = { ts = timestamp, sz = sz, } + assert(not addrObj.measures[timestamp]) + addrObj.measures[timestamp] = measure + ::nextLine:: + end +end + + +function removeUnchanged( app ) + local addrsWhichHaveChanged = {} + local knownSizes = {} + for addr, addrObj in pairs(app.addrs) do + for ts, measure in pairs(addrObj.measures) do + local knownSizeKey = assert(addr) + local knownSize = knownSizes[knownSizeKey] + if not knownSize then + knownSize = measure.sz; + knownSizes[knownSizeKey] = knownSize + elseif knownSize ~= measure.sz then + addrsWhichHaveChanged[addr] = true + end + end + end + local newAddrs = {} + for addr, addrObj in pairs(app.addrs) do + if addrsWhichHaveChanged[addr] then + newAddrs[addr] = addrObj + end + end + app.addrs = newAddrs +end + + +function printResult( app ) + -- arrange data + local addrSet, tsSet, szByAddrAndTs = {}, {}, {} + for addr, addrObj in pairs(app.addrs) do + local measures = assert(addrObj.measures) + addrSet[addr] = true + for ts, measure in pairs(measures) do + assert(ts == measure.ts) + local sz = measure.sz + tsSet[ts] = true + szByAddrAndTs[addr.."\0"..ts] = sz + end + end + local addrArr, tsArr = {}, {} + for k,v in pairs(addrSet)do table.insert(addrArr, k) end + for k,v in pairs(tsSet)do table.insert(tsArr, k) end + table.sort(addrArr, function( a, b )return a < b end) + table.sort(tsArr, function( a, b )return a < b end) + -- + out:write("c;file") + for _, addr in ipairs(addrArr) do out:write(";".. addr) end + out:write("\n") + for iTs, ts in ipairs(tsArr) do + out:write("r;".. filterTsForOutput(app, ts)) + for iAddr, addr in ipairs(addrArr) do + local sz = szByAddrAndTs[assert(addr).."\0"..assert(ts)] + out:write(";".. sz) + end + out:write("\n") + end +end + + +function filterTsForOutput( app, ts ) + local y, mnth, d, h, min, sec = ts:match("^houston%-prod%-pmap%-(%d%d%d%d)(%d%d)(%d%d)%-(%d%d)(%d%d)(%d%d).txt$") + return "".. 
os.time{ year=y, month=mnth, day=d, hour=h, min=min, sec=sec, } +end + + +function sortedFromMap( map, smallerPredicate ) + if not smallerPredicate then smallerPredicate = function(a,b)return a.key < b.key end end + local arr = {} + for k, v in pairs(map) do table.insert(arr, {key=k, val=v}) end + table.sort(arr, smallerPredicate) + return arr +end + + +function run( app ) + if app.isStdinFilelist then + runWithStdinFilelist(app) + else + runAsPipe(app) + end +end + + +function main() + local app = { + isHelp = false, + isStdinFilelist = false, + addrs = {}, + whitelist = { + --["00000000DEADBEAF"] = true, + } + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +main() -- cgit v1.1 From 2be59b90732903c75152a68588d20d16f13cd248 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 7 Nov 2023 20:12:12 +0100 Subject: Fine tune --- doc/note/links/links.txt | 1 + doc/note/openshift/dbg-mem-issues.txt | 13 ++++++------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index b0c97e5..a2c5353 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -83,6 +83,7 @@ Links (Aka argument amplifiers) - SRP "https://blog.ndepend.com/solid-design-the-single-responsibility-principle-srp/" - OCP "https://blog.ndepend.com/solid-design-the-open-close-principle-ocp/" - LSP "https://blog.ndepend.com/solid-design-the-liskov-substitution-principle/" +- ISP "https://blog.ndepend.com/solid-design-the-interface-segregation-principle-isp/" - DIP "https://stackify.com/dependency-inversion-principle/#post-18184-_nuqaxpnmvpn7" ## Java how to handle InterruptedException: diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 47bb9a5..57864b2 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -17,6 +17,8 @@ ${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- sh -c 'true && printf '\''%s\n'\'' "$(sed '\''s;^(.*)$;FOO;'\'' /proc/'${PID:?}'/smaps)" \ ' +dropPadding () { sed -E 's_ *; *_;_g'; } + true `# Track pod memory` \ && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \ && printf '\''h;PageSize;%s\n'\'' $(getconf PAGESIZE) \ @@ -43,24 +45,21 @@ true \ && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \ ;done) -true \ - && cat houston-prod-pmap-*.csv > houston-prod-pmapAll.csv \ - && true - - true `# log JMX stuff` \ && grepUsed () { egrep 'used : ' | sed -r 's_^[^0-9]+ ([0-9]+) [^0-9]+$_\1_'; } \ && grepPureNumberLine () { egrep $(printf '^[0-9]+\r?$') | sed -r 's_^(.*)\r$_\1_'; } \ && (true \ - && printf 'c; When ; JvmMetaspace; jvmNonHeap; JvmClassCnt; JvmHeap\n' \ + && printf 'c; When ; JvmMetaspace; jvmNonHeap; JvmClassCnt; JvmHeap\n' \ && while true; do true \ && metaSpcByts="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/name=Metaspace,type=MemoryPool/attributes/Usage/' | grepUsed)" \ && jvmNonHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/NonHeapMemoryUsage/' | grepUsed)" \ && ldClassCnt="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=ClassLoading/attributes/LoadedClassCount/' | grepPureNumberLine)" \ && jvmHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/HeapMemoryUsage/' | grepUsed)" \ - && printf 'r;%s;%13d;%11d;%12d;%9d\n' "$(date -Is)" "${metaSpcByts:?}" "${jvmNonHeap:?}" "${ldClassCnt:?}" 
"${jvmHeap:?}" \ + && printf 'r;%s;%13d;%11d;%12d;%11d\n' "$(date -Is)" "${metaSpcByts:?}" "${jvmNonHeap:?}" "${ldClassCnt:?}" "${jvmHeap:?}" \ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break \ ; done) \ | tee "houston-prod-jmx-Metaspace-used-$(date +%Y%m%d-%H%M%S).log" \ && true + + -- cgit v1.1 From b6f61c252329ed222fcf7a254b4e83beb78fa0ce Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 8 Nov 2023 00:56:53 +0100 Subject: Add some notes about jssc build (but see github jssc) --- doc/note/qemu/build-jssc.txt | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 doc/note/qemu/build-jssc.txt diff --git a/doc/note/qemu/build-jssc.txt b/doc/note/qemu/build-jssc.txt new file mode 100644 index 0000000..3acdf6e --- /dev/null +++ b/doc/note/qemu/build-jssc.txt @@ -0,0 +1,41 @@ + +This is only another copy. Likely we should use the one at +https://github.com/hiddenalpha/jssc/blob/master/contrib/hiddenalpha-buildEnv-one +. + +true `# Configure for debian 9` \ + && CXX="g++" \ + && CFLAGS="-fPIC -Wall -pedantic -Werror \ + -Wno-error=long-long \ + -Wno-error=sign-compare \ + -Wno-error=variadic-macros \ + -Wno-long-long" \ + && targets="linux_64" \ + && SUDO= \ + && true + +true `# Setup` \ + && $SUDO apt install -y --no-install-recommends \ + git openjdk-8-jdk-headless g++ maven \ + && true + +true `# Make` \ + && cat contrib/hiddenalpha-buildEnv-one/res/pom.patch | git apply \ + && mvn clean \ + && mvn -PnoCmake compile \ + && printf '%s "%s"\n' "#define JSSC_VERSION" "$(git describe --tags|sed 's,^v,,')" \ + > src/main/cpp/version.h \ + && mkdir -p src/main/resources-precompiled/natives/linux_64 \ + && g++ $CFLAGS -shared \ + -o src/main/resources-precompiled/natives/linux_64/libjssc.so \ + src/main/cpp/_nix_based/jssc.cpp \ + -I/usr/lib/jvm/java-1.8.0-openjdk-amd64/include \ + -I/usr/lib/jvm/java-1.8.0-openjdk-amd64/include/linux \ + -Isrc/main/cpp \ + && for T in ${targets:?}; do + && mvn -PnoCmake -PnoJavah -PnativeJar -P"${T:?}" package \ + ;done \ + && mvn -PnoCmake -PnoJavah -PnoNatives -PwithTestClasspath verify \ + && true + + -- cgit v1.1 From ba88b6ebb4509e71195b52944592c9a7d2a36136 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 10 Nov 2023 19:11:24 +0100 Subject: stuff --- doc/note/links/links.txt | 2 +- doc/note/openshift/dbg-mem-issues.txt | 46 ++++++++++++++++++----------------- 2 files changed, 25 insertions(+), 23 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index a2c5353..faa0481 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -352,7 +352,7 @@ Links (Aka argument amplifiers) - "https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html" ## YAGNI (but also KISS and DRY) -- "https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6#fc82" +- [YAGNI, KISS and DRY](https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6#fc82) - [eagle queue json only](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/331/overview?commentId=236944) - [Uncle Bob - Why Are Programmers slow](https://youtu.be/G6HyEeEcB-w) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 57864b2..010b5c9 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -8,16 +8,12 @@ true \ && OC= \ && JMX= \ && MemLeakTry1="lua -W MemLeakTry1.lua" \ + && dropPadding () { sed -E 's_ 
*; *_;_g'; } \ + && getPodName () { ${OC:?} get pods | egrep ston-[0-9] | cut -d' ' -f1; } \ && true -${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- pmap 9 > "${SVCNAME:?}"-pmap-$(date -u +%Y%m%d-%H%M%S).txt - -${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- sh -c 'true \ - && printf '\''%s\n'\'' "$(sed '\''s;^(.*)$;FOO;'\'' /proc/'${PID:?}'/smaps)" \ - ' - -dropPadding () { sed -E 's_ *; *_;_g'; } +${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- pmap 9 > "pmap/${SVCNAME:?}"-pmap-$(date -u +%Y%m%d-%H%M%S).txt true `# Track pod memory` \ && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \ @@ -30,20 +26,7 @@ true `# Track pod memory` \ $(cat /proc/'${PID:?}'/statm) \ $(ls -1 /proc/9/fd | wc -l) \ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break; done' \ - | tee "${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv - -true \ - && ONE="houston-prod-pmap-20231102-163425.txt" \ - && TWO="houston-prod-pmap-20231103-074301.txt" \ - && diff -U0 "${ONE:?}" "${TWO:?}" | egrep '^\+' | sed -r 's_\+([^ ]+) .*$_\1_'|sort|uniq \ - && true - -(true \ - && for F in $(ls *pmap*.txt); do true \ - && printf "$F\n" \ - && DATE="$(date +%s -d "$(echo $F|sed -r 's_.*([0-9]{4})([0-9]{2})([0-9]{2})-([0-9]{2})([0-9]{2})([0-9]{2}).*_\1-\2-\3T\4:\5:\6Z_')")" \ - && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \ - ;done) + | tee "mem/${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv true `# log JMX stuff` \ && grepUsed () { egrep 'used : ' | sed -r 's_^[^0-9]+ ([0-9]+) [^0-9]+$_\1_'; } \ @@ -58,8 +41,27 @@ true `# log JMX stuff` \ && printf 'r;%s;%13d;%11d;%12d;%11d\n' "$(date -Is)" "${metaSpcByts:?}" "${jvmNonHeap:?}" "${ldClassCnt:?}" "${jvmHeap:?}" \ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break \ ; done) \ - | tee "houston-prod-jmx-Metaspace-used-$(date +%Y%m%d-%H%M%S).log" \ + | tee "jmx/houston-prod-jmx-Metaspace-used-$(date +%Y%m%d-%H%M%S).log" \ && true +(true `# Fetch logs` \ + && while true; do true \ + && printf '%s - Fetch logs\n' "$(date -Is)" \ + && ${OC:?} exec -i "$(getPodName)" -- sh -c 'cd /usr/local/vertx/logs && (tar -cz houston* || test $? -eq 1)' \ + > "logs/${SVCNAME:?}-log-$(date -u +%Y%m%d-%H%M%SZ).tgz" \ + && sleep $(expr 14400 - \( $(date +%s) % 14400 \)) || break; done \ + && true) + +true \ + && ONE="houston-prod-pmap-20231102-163425.txt" \ + && TWO="houston-prod-pmap-20231103-074301.txt" \ + && diff -U0 "${ONE:?}" "${TWO:?}" | egrep '^\+' | sed -r 's_\+([^ ]+) .*$_\1_'|sort|uniq \ + && true +(true \ + && for F in $(ls *pmap*.txt); do true \ + && printf "$F\n" \ + && DATE="$(date +%s -d "$(echo $F|sed -r 's_.*([0-9]{4})([0-9]{2})([0-9]{2})-([0-9]{2})([0-9]{2})([0-9]{2}).*_\1-\2-\3T\4:\5:\6Z_')")" \ + && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \ + ;done) -- cgit v1.1 From 40b09678fa9317ba6d6fdbdadd53e9172c764743 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 14 Nov 2023 17:45:48 +0100 Subject: Add performance link. Fix typo. 
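
Side note on the tracking loop this fixes: the /proc/<pid>/statm
fields are page counts (size resident shared text lib data dt), not
bytes. A minimal sketch to convert the first three to bytes, assuming
a Linux pod where getconf reports the kernel page size:

    true `# statm pages to bytes (sketch)` \
      && PGSZ=$(getconf PAGESIZE) \
      && set -- $(cat /proc/${PID:?}/statm) \
      && printf 'size=%s RSS=%s shared=%s (bytes)\n' \
           $((${1:?} * PGSZ)) $((${2:?} * PGSZ)) $((${3:?} * PGSZ)) \
      && true
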
--- doc/note/links/links.txt | 1 + doc/note/openshift/dbg-mem-issues.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index faa0481..77ae20d 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -241,6 +241,7 @@ Links (Aka argument amplifiers) - [Houston storage request timed out large json](https://jira.post.ch/browse/SDCISA-11294) - [Preflux Garbage Collection issues](https://jira.post.ch/browse/SDCISA-4714) - [Preflux Gatherfacts läuft in Timeout](https://jira.post.ch/browse/SDCISA-8136) +- [Performance Fahrplanimports](https://jira.post.ch/browse/SDCISA-11528) - [Houston Optimize EnqueuePatrol](https://jira.post.ch/browse/SDCISA-2876) - [Update beim Fahrzeughersteller dauert zu lange](https://jira.post.ch/browse/SDCISA-9059) - [vortex too slow](https://jira.post.ch/browse/SDCISA-9990) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 010b5c9..bf402e6 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -24,7 +24,7 @@ true `# Track pod memory` \ "$(date -Is)" \ $(cat /proc/'${PID:?}'/stat|cut -d" " -f20) \ $(cat /proc/'${PID:?}'/statm) \ - $(ls -1 /proc/9/fd | wc -l) \ + $(ls -1 /proc/'${PID:?}'/fd | wc -l) \ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break; done' \ | tee "mem/${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv -- cgit v1.1 From c258d0107091b941b556d5e31923430cc1b9e50f Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 16 Nov 2023 01:12:26 +0100 Subject: Some qemu notes --- doc/note/qemu/qemu.txt | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 214fb81..1c98baa 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -36,14 +36,14 @@ Windoof: ## Example Params (Usage: CopyPaste, then delege what is not needed) qemu-system-x86_64 \ - -enable-kvm -m size=4G -smp cores=$(nproc) \ + -accel kvm:whpx:hax:tcg -m size=4G -smp cores=$(nproc) \ -monitor stdio \ `# Drives & Boot.` \ -boot order=dc \ -cdrom "path/to/cd.iso" \ -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ - `# Isolated Network` \ - -netdev user,id=n1,ipv6=off,restrict=y \ + `# Isolated Network plus host port/cmd reachable from guest` \ + -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \ -device e1000,netdev=n1 \ `# 10.0.2.x network with host redirect` \ -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ @@ -53,12 +53,13 @@ qemu-system-x86_64 \ -device e1000,netdev=n1 \ `# Fix broken host systems` \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ + `# Fix broken guest systems` \ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ `# Choose ONE of those for graphic output` \ -nographic \ -display sdl,grab-mod=rctrl \ -display gtk,show-menubar=on \ - -display vnc=127.0.0.1:0 `#HINT: 0 is port 5900` \ + -display vnc=127.0.0.1:0,to=99 `#HINT: 0 is port 5900` \ ; ### Example manual adapter setup (inside VM) for socket mcast network: -- cgit v1.1 From 7b590b725eb2ab31526c69b4e399ffad92118ff6 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 16 Nov 2023 20:17:15 +0100 Subject: Update some notes --- doc/note/links/links.txt | 6 +++--- doc/note/tcpdump/tcpdump.txt | 7 ++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 
77ae20d..477e02a 100644
--- a/doc/note/links/links.txt
+++ b/doc/note/links/links.txt
@@ -218,7 +218,7 @@ Links (Aka argument amplifiers)
 - "https://stackoverflow.com/a/20177092/4415884"
 - "https://github.com/swisspush/gateleen/pull/426#discussion_r813752075"
 - "https://github.com/swisspush/gateleen/blob/v1.1.61/gateleen-kafka/src/main/java/org/swisspush/gateleen/kafka/KafkaMessageSender.java#L21"
-- performance long "https://m.youtube.com/watch?v=x5akmCWgGY0"
+- [How to use java fancy streams](https://m.youtube.com/watch?v=x5akmCWgGY0)
 - think please ... "https://m.youtube.com/watch?v=hSfylUXhpkA"
 
 ## The Only way to Format Dates ISO 8601
@@ -235,8 +235,8 @@ Links (Aka argument amplifiers)
 ## Performance DOES matter
 - "https://github.com/swisspush/gateleen/pull/456#discussion_r844865066"
 - [Performance Excuses Debunked](https://m.youtube.com/watch?v=x2EOOJg8FkA)
-- [Is writing performant code too expensive?](https://m.youtube.com/watch?v=EpYr3T5VP6w)
-- [Simple Code, High Performance](https://m.youtube.com/watch?v=Ge3aKEmZcqY)
+- [Frameworks & clusters do not solve it](https://www.youtube.com/watch?v=EpYr3T5VP6w&t=1109)
+- [Simple Code, High Performance](https://m.youtube.com/watch?v=Ge3aKEmZcqY&t=78)
 - [Houston Last führt zu Neustart](https://wikit.post.ch/x/HDV8T)
 - [Houston storage request timed out large json](https://jira.post.ch/browse/SDCISA-11294)
 - [Preflux Garbage Collection issues](https://jira.post.ch/browse/SDCISA-4714)
diff --git a/doc/note/tcpdump/tcpdump.txt b/doc/note/tcpdump/tcpdump.txt
index d19c36b..7df4335 100644
--- a/doc/note/tcpdump/tcpdump.txt
+++ b/doc/note/tcpdump/tcpdump.txt
@@ -16,8 +16,13 @@ Tcpdump
 
     redis=6379, brox=7022, fluentd=7099
 
-    cd /tmp && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-STAGE-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+    cd /usr/local/vertx/houston-storage-file && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-STAGE-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+
+    cd /tmp && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-prod-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+
+    ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /tmp && tar c houston-prod-tcp-20231114-165243.pcap*.gz' > houston-prod-tcp-20231114-165243.pcap.gz.tar
+
+    ocprod exec -ti "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /tmp && watch ls -Ahl'
 
 ## pcap cli PreProcessing
-- cgit v1.1


From 91e4d70be7cf667b2e5154b3b09e3c5633a4f799 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Fri, 17 Nov 2023 19:20:23 +0100
Subject: Update some links

---
 doc/note/links/links.txt | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt
index 477e02a..20eb16b 100644
--- a/doc/note/links/links.txt
+++ b/doc/note/links/links.txt
@@ -109,6 +109,7 @@ Links (Aka argument amplifiers)
 - "https://medium.com/humans-create-software/composition-over-inheritance-cb6f88070205"
 - "https://softwareengineering.stackexchange.com/a/371715/306800"
 - "https://youtu.be/wfMtDGfHWpA"
+- [Damn! 
Use it!](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/5/overview?commentId=316867) ## requirements, experts, business: - "https://www.youtube.com/watch?v=BKorP55Aqvg" @@ -205,6 +206,7 @@ Links (Aka argument amplifiers) ## Java Memory - "youtube.com/watch?v=f2aNWtt0QRo" - jvm GC statistics "https://stackoverflow.com/a/467366" +- [sizeof(java.lang.Object)](https://stackoverflow.com/a/258150/4415884) ## Yaml Is Bullshit - "https://www.arp242.net/yaml-config.html#can-be-hard-to-edit-especially-for-large-files" @@ -252,9 +254,10 @@ Links (Aka argument amplifiers) - [Houston OOM 2023-01-20](https://wikit.post.ch/x/iRepPQ) - [Houston OOM Killed](https://jira.post.ch/browse/SDCISA-10871) - [http cache disable](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/1/overview?commentId=287832) +- [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) ## Common Performance -- [Optimize code by doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY) +- [going fast is about doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY) - [CppCon Tuning Benchmarks clang CPUs Compilers" ](https://m.youtube.com/watch?v=nXaxk27zwlk) ## Bugs are not an issue @@ -355,7 +358,8 @@ Links (Aka argument amplifiers) ## YAGNI (but also KISS and DRY) - [YAGNI, KISS and DRY](https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6#fc82) - [eagle queue json only](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/331/overview?commentId=236944) -- [Uncle Bob - Why Are Programmers slow](https://youtu.be/G6HyEeEcB-w) +- [How to repair KISS](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) +- [won't stream, bcause YAGNI](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/5/overview?commentId=316503) ## How to format method parameters - "https://gitit.post.ch/projects/ISA/repos/god-backend/pull-requests/281/overview?commentId=210650" -- cgit v1.1 From cd3753a465bf08e754a6d27eb81d6031c15a1df3 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 21 Nov 2023 15:43:36 +0100 Subject: Try to automate some annoying mshit garbage --- src/main/lua/mshitteams/ListEmlInbox.lua | 173 +++++++++++++++++++++++++++++ src/main/lua/mshitteams/SendRawMsEmail.lua | 60 ++++++++++ 2 files changed, 233 insertions(+) create mode 100644 src/main/lua/mshitteams/ListEmlInbox.lua create mode 100644 src/main/lua/mshitteams/SendRawMsEmail.lua diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua new file mode 100644 index 0000000..31e9648 --- /dev/null +++ b/src/main/lua/mshitteams/ListEmlInbox.lua @@ -0,0 +1,173 @@ +-- +-- Sources: +-- - [Authorize](https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http) +-- +-- TODO: scriptlee 0.0.5-83-gdffa272 seems to SEGFAULT constantly here. No +-- matter if we use socket or newHttpClient. 
+-- + +local SL = require("scriptlee") +local AF_INET = SL.posix.AF_INET +local getaddrinfo = SL.posix.getaddrinfo +local INADDR_ANY = SL.posix.INADDR_ANY +local inaddrOfHostname = SL.posix.inaddrOfHostname +local IPPROTO_TCP = SL.posix.IPPROTO_TCP +local objectSeal = SL.objectSeal +local SOCK_STREAM = SL.posix.SOCK_STREAM +local socket = SL.posix.socket +local startOrExecute = SL.reactor.startOrExecute +--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1) +SL = nil + +local authorizeToMsGraphApi, getAccessToken, getAuthHdr, httpUrlEncode, main, parseArgs, printHelp, + run, getMyProfileForDebugging +local inn, out, log = io.stdin, io.stdout, io.stderr + + +function printHelp() + out:write(" \n" + .." Options:\n" + .." \n" + .."\n\n") +end + + +function parseArgs( app ) + if #_ENV.arg == 0 then log:write("EINVAL: Args missing\n")return-1 end + local iA = 0 + local isYolo = false + while true do iA = iA + 1 + local arg = _ENV.arg[iA] + if not arg then + break + elseif arg == "--help" then + app.isHelp = true; return 0 + elseif arg == "--yolo" then + isYolo = true + else + log:write("EINVAL: ".. arg .."\n") return-1 + end + end + if not isYolo then log:write("EINVAL\n")return-1 end + return 0 +end + + +function getMyProfileForDebugging( app ) + local sck = app.sck + local authKey, authVal = getAuthHdr(app) + local req = objectSeal{ + base = false, + } + sck:write("GET /v1.0/me HTTP/1.1\r\n" + .."".. authKey ..": ".. authVal .."\r\n" + .."\r\n") + sck:flush() + local buf = sck:read() + log:write("buf is '"..tostring(buf).."'\n") +end + + +function authorizeToMsGraphApi( app ) + -- See "https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http" + local redirUri = "https%3A%2F%2Flogin.microsoftonline.com%2Fcommon%2Foauth2%2Fnativeclient" + local scope = "offline_access%20user.read%20mail.read" + local stateDict = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + local state = {} + for i=1, 16 do + local rnd = math.random(1, #stateDict) + state[i] = chars:sub(rnd, rnd) + end + state = table.concat(state) + local method = "GET" + local url = "https://login.microsoftonline.com/".. app.msTenant .."/oauth2/v2.0/authorize" + .."?client_id=".. app.msAppId + .."&response_type=code" + .."&redirect_uri=".. redirUri + .."&response_mode=query" + .."&scope=".. httpUrlEncode(app.msPerms) + .."&state=".. state +end + + +function httpUrlEncode( app, str ) + local hexDigits, ret, beg, iRd = "0123456789ABCDEF", {}, 1, 0 + ::nextInputChar:: + iRd = iRd + 1 + local byt = str:byte(iRd) + if not byt then + elseif byt == 0x2D -- dash + or byt >= 0x30 and byt <= 0x39 -- 0-9 + or byt >= 0x40 and byt <= 0x5A -- A-Z + or byt >= 0x60 and byt <= 0x7A -- a-z + then + goto nextInputChar + end + if beg < iRd then table.insert(ret, str:sub(beg, iRd-1)) end + if not byt then return table.concat(ret) end + table.insert(ret, "%") + local hi = (byt & 0xF0) >> 4 +1 + local lo = (byt & 0x0F) +1 + table.insert(ret, hexDigits:sub(hi, hi) .. hexDigits:sub(lo, lo)) + beg = iRd + 1 + goto nextInputChar +end + + +function getAccessToken( app ) + -- See "https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http#3-request-an-access-token" + local method = "POST" + local uri = "/".. app.msTenant .."/oauth2/v2.0/token" + local hdrs = { + { "Host", "https://login.microsoftonline.com" }, + { "Content-Type", "application/x-www-form-urlencoded" }, + } + local body = "" + .."client_id=".. app.msAppId + .."&scope=".. scope + .."&code=".. code + .."&redirect_uri=".. 
redirUri + .."&grant_type=authorization_code" +end + + +-- @return 1 - HTTP header key +-- @return 2 - HTTP header value +function getAuthHdr( app ) + return "Authorization", ("Bearer ".. app.msBearerToken) +end + + +function initHttpClient( app ) + local sck = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP) + sck:connect(app.msGraphHost, app.msGraphPort, app.connectTimeoutMs) + app.sck = sck +end + + +function run( app ) + initHttpClient(app) + getMyProfileForDebugging(app) +end + + +function main() + local app = objectSeal{ + isHelp = false, + msGraphHost = "127.0.0.1", + msGraphPort = 8080, + msTenant = "TODO_1700563786", + msAppId = "TODO_1700563821", + msPerms = "offline_access user.read mail.read", + msBearerToken = "TODO_1700575589", + connectTimeoutMs = 3000, + sck = false, + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +startOrExecute(main) + diff --git a/src/main/lua/mshitteams/SendRawMsEmail.lua b/src/main/lua/mshitteams/SendRawMsEmail.lua new file mode 100644 index 0000000..2d2940e --- /dev/null +++ b/src/main/lua/mshitteams/SendRawMsEmail.lua @@ -0,0 +1,60 @@ + +local SL = require("scriptlee") +--local newHttpClient = SL.newHttpClient +--local newShellcmd = SL.newShellcmd +--local objectSeal = SL.objectSeal +--local parseJSON = SL.parseJSON +--local sleep = SL.posix.sleep +--local newCond = SL.posix.newCond +--local async = SL.reactor.async +--local startOrExecute = SL.reactor.startOrExecute +--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1) +SL = nil + +local mod = {} +local inn, out, log = io.stdin, io.stdout, io.stderr + + +function mod.printHelp() + out:write(" \n" + .." Options:\n" + .." \n" + .."\n\n") +end + + +function mod.parseArgs( app ) + local isStdinn = false + local iA = 0 + while true do iA = iA + 1 + local arg = _ENV.arg[iA] + if not arg then + break + elseif arg == "--help" then + app.isHelp = true; return 0 + else + log:write("Unknown arg: ".. arg .."\n") return-1 + end + end + if not isStdinn then log:write("Bad args\n")return-1 end + return 0 +end + + +function mod.run( app ) + error("TODO_20230608125925") +end + + +function mod.main() + local app = objectSeal{ + isHelp = false, + } + if mod.parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then mod.printHelp() return end + mod.run(app) +end + + +startOrExecute(mod.main) + -- cgit v1.1 From 5d205503c56ccb2a4991670dae5359bbd41f4f72 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 21 Nov 2023 15:44:48 +0100 Subject: Update notes about OOM analysis. 
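
If jcmd happens to be available inside the image, the dump can also be
triggered without going through the JMX monitor. Untested sketch (jcmd
presence and the target path are assumptions):

    true `# heap dump via jcmd (sketch)` \
      && ${OC:?} exec -i "$(getPodName)" -- \
           jcmd ${PID:?} GC.heap_dump /usr/local/vertx/houston-storage-file/heap-manual.hprof \
      && true
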
--- doc/note/links/links.txt | 3 +++ doc/note/openshift/dbg-mem-issues.txt | 15 +++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 20eb16b..ab09e60 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -547,3 +547,6 @@ Links (Aka argument amplifiers) ## Git paisa complain about rebase stuff - [Complain about force-pushes](https://gitit.post.ch/projects/ISA/repos/lazlar/pull-requests/3/overview?commentId=311142) +## Angular is terrible +- [Why angular sucks](https://medium.com/dirtyjs/why-angular-2-4-5-6-sucks-afb36567ad68) + diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index bf402e6..07baaff 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -52,6 +52,21 @@ true `# log JMX stuff` \ && sleep $(expr 14400 - \( $(date +%s) % 14400 \)) || break; done \ && true) + +`# Create heap dump` +com.sun.management.dumpHeap("/usr/local/vertx/houston-storage-file/houston-___-heap-2023____-____Z.hprof", true) + +`# Inspect` +ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && ls -Ahl' + +true `# Get made heap dump` \ + && echo create checksum. \ + && ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && md5sum -b houston-*.hprof >> MD5SUM-$(date -u +%Y%m%d-%H%M%SZ)' \ + && echo checksum done. Begin dload. \ + && ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && tar c houston-*.hprof MD5SUM*' | (cd heapDump && tar x) \ + && echo dload done \ + && true + true \ && ONE="houston-prod-pmap-20231102-163425.txt" \ && TWO="houston-prod-pmap-20231103-074301.txt" \ -- cgit v1.1 From 0b03287281001a825be030f7872906967ec0c609 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 21 Nov 2023 19:41:35 +0100 Subject: Update notes. 
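
For the log merging below: if every line started with its ISO
timestamp, plain sort could merge the already-sorted files too, e.g.:

    sort --merge houston-prod-log-*.log | gzip -n > houston-log-merged.log.gz

But that reorders multi-line entries (stack traces), which is
presumably what the lua merger below accounts for.
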
--- doc/note/openshift/dbg-mem-issues.txt | 44 +++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 07baaff..a06ba4f 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -52,6 +52,50 @@ true `# log JMX stuff` \ && sleep $(expr 14400 - \( $(date +%s) % 14400 \)) || break; done \ && true) +true `# Merge logs` \ + && PREFX="houston-prod-log-" \ + && SUFFX=".tgz" \ + && for SRCTGZ in \ + ${PREFX:?}20231110-160510Z${SUFFX:?} \ + ${PREFX:?}20231110-181226Z${SUFFX:?} \ + ${PREFX:?}20231114-093133Z${SUFFX:?} \ + ${PREFX:?}20231114-120002Z${SUFFX:?} \ + ${PREFX:?}20231114-123040Z${SUFFX:?} \ + ${PREFX:?}20231114-160001Z${SUFFX:?} \ + ${PREFX:?}20231116-082933Z${SUFFX:?} \ + ${PREFX:?}20231116-120002Z${SUFFX:?} \ + ${PREFX:?}20231116-160002Z${SUFFX:?} \ + ${PREFX:?}20231117-081112Z${SUFFX:?} \ + ${PREFX:?}20231117-120001Z${SUFFX:?} \ + ${PREFX:?}20231117-164612Z${SUFFX:?} \ + ; do true \ + && echo "[INFO ] Create ${SRCTGZ%.*}.log" \ + && tar xf ../logs/${SRCTGZ:?} \ + && unzip houston.log.1.zip \ + && cat houston.log.1 houston.log > "${SRCTGZ%.*}.log" \ + && rm houston.log.1.zip houston.log.1 houston.log \ + ;done && true \ + && printf '%s' ' + local newLogFileMerger = require("AndisLogUtils").newLogFileMerger + local merger = newLogFileMerger{ + sources = { + io.open("houston-prod-log-20231110-160510Z.log", "r"), + io.open("houston-prod-log-20231110-181226Z.log", "r"), + io.open("houston-prod-log-20231114-093133Z.log", "r"), + io.open("houston-prod-log-20231114-120002Z.log", "r"), + io.open("houston-prod-log-20231114-123040Z.log", "r"), + io.open("houston-prod-log-20231114-160001Z.log", "r"), + io.open("houston-prod-log-20231116-082933Z.log", "r"), + io.open("houston-prod-log-20231116-120002Z.log", "r"), + io.open("houston-prod-log-20231116-160002Z.log", "r"), + io.open("houston-prod-log-20231117-081112Z.log", "r"), + io.open("houston-prod-log-20231117-120001Z.log", "r"), + io.open("houston-prod-log-20231117-164612Z.log", "r"), + }, + snk = { write = function( t, buf, b, c ) io.stdout:write(buf) io.stdout:write("\n") end, }, + }' | lua -W - | gzip -n > houston-log-merged-$(date -u +%Y%m%d-%H%M%S)Z.log.gz \ + && true + `# Create heap dump` com.sun.management.dumpHeap("/usr/local/vertx/houston-storage-file/houston-___-heap-2023____-____Z.hprof", true) -- cgit v1.1 From 5726bea2ae263cf670d1419aa0cc05a2f6ea7a91 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 22 Nov 2023 01:44:17 +0100 Subject: Tinker around with mdoof api. Still no access possible. --- src/main/lua/mshitteams/ListEmlInbox.lua | 194 ++++++++++++++++++++++--------- 1 file changed, 142 insertions(+), 52 deletions(-) diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua index 31e9648..33bc877 100644 --- a/src/main/lua/mshitteams/ListEmlInbox.lua +++ b/src/main/lua/mshitteams/ListEmlInbox.lua @@ -1,20 +1,22 @@ -- -- Sources: -- - [Authorize](https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http) +-- - [Auth witout app register](https://techcommunity.microsoft.com/t5/teams-developer/authenticate-microsoft-graph-api-with-username-and-password/m-p/3940540) -- -- TODO: scriptlee 0.0.5-83-gdffa272 seems to SEGFAULT constantly here. No -- matter if we use socket or newHttpClient. 
-- local SL = require("scriptlee") -local AF_INET = SL.posix.AF_INET -local getaddrinfo = SL.posix.getaddrinfo -local INADDR_ANY = SL.posix.INADDR_ANY -local inaddrOfHostname = SL.posix.inaddrOfHostname -local IPPROTO_TCP = SL.posix.IPPROTO_TCP +local newHttpClient = SL.newHttpClient +--local AF_INET = SL.posix.AF_INET +--local getaddrinfo = SL.posix.getaddrinfo +--local INADDR_ANY = SL.posix.INADDR_ANY +--local inaddrOfHostname = SL.posix.inaddrOfHostname +--local IPPROTO_TCP = SL.posix.IPPROTO_TCP local objectSeal = SL.objectSeal -local SOCK_STREAM = SL.posix.SOCK_STREAM -local socket = SL.posix.socket +--local SOCK_STREAM = SL.posix.SOCK_STREAM +--local socket = SL.posix.socket local startOrExecute = SL.reactor.startOrExecute --for k,v in pairs(SL)do print("SL",k,v)end os.exit(1) SL = nil @@ -26,9 +28,8 @@ local inn, out, log = io.stdin, io.stdout, io.stderr function printHelp() out:write(" \n" - .." Options:\n" - .." \n" - .."\n\n") + .." TODO write help page\n" + .." \n") end @@ -42,51 +43,138 @@ function parseArgs( app ) break elseif arg == "--help" then app.isHelp = true; return 0 + elseif arg == "--user" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --user needs value\n")return-1 end + app.msUser = arg + elseif arg == "--pass" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --pass needs value\n")return-1 end + app.msPass = arg elseif arg == "--yolo" then isYolo = true else log:write("EINVAL: ".. arg .."\n") return-1 end end - if not isYolo then log:write("EINVAL\n")return-1 end + if not isYolo then log:write("EINVAL: This app is only for insiders\n")return-1 end + if not app.msUser then log:write("EINVAL: --user missing\n") return-1 end + if not app.msPass then log:write("EINVAL: --pass missing\n") return-1 end return 0 end function getMyProfileForDebugging( app ) - local sck = app.sck + local http = app.http local authKey, authVal = getAuthHdr(app) local req = objectSeal{ base = false, + method = "GET", + uri = "/v1.0/me", + rspCode = false, + rspBody = {}, } - sck:write("GET /v1.0/me HTTP/1.1\r\n" - .."".. authKey ..": ".. authVal .."\r\n" - .."\r\n") - sck:flush() - local buf = sck:read() - log:write("buf is '"..tostring(buf).."'\n") + req.base = http:request{ + cls = req, + host = app.msGraphHost, + port = app.msGraphPort, + connectTimeoutMs = 3000, + method = req.method, + url = req.uri, + hdrs = { + { authKey, authVal }, + }, + --useHostHdr = , + --useTLS = true, + onRspHdr = function( rsp, cls ) + cls.rspCode = rsp.status + if rsp.status ~= 200 then + log:write("> ".. req.method .." ".. req.uri .."\n> \n") + log:write("< ".. rsp.proto .." ".. rsp.status .." ".. 
rsp.phrase .."\n") + for _,h in ipairs(rsp.headers)do log:write("< "..h[1]..": "..h[2].."\n")end + log:write("\n") + end + end, + onRspChunk = function(buf, cls) + if cls.rspCode ~= 200 then + log:write("< ") + log:write((buf:gsub("\n", "\n< "))) + log:write("\n") + else + assert(type(buf) == "string") + table.insert(cls.rspBody, buf) + end + end, + onRspEnd = function(cls) + if cls.rspCode ~= 200 then error("Request failed.") end + cls.rspBody = table.concat(cls.rspBody) + log:write("Response was:\n\n") + log:write(cls.rspBody) + log:write("\n\n") + end, + } + req.base:closeSnk() end function authorizeToMsGraphApi( app ) - -- See "https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http" - local redirUri = "https%3A%2F%2Flogin.microsoftonline.com%2Fcommon%2Foauth2%2Fnativeclient" - local scope = "offline_access%20user.read%20mail.read" - local stateDict = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - local state = {} - for i=1, 16 do - local rnd = math.random(1, #stateDict) - state[i] = chars:sub(rnd, rnd) - end - state = table.concat(state) - local method = "GET" - local url = "https://login.microsoftonline.com/".. app.msTenant .."/oauth2/v2.0/authorize" - .."?client_id=".. app.msAppId - .."&response_type=code" - .."&redirect_uri=".. redirUri - .."&response_mode=query" - .."&scope=".. httpUrlEncode(app.msPerms) - .."&state=".. state + local http = app.http + local req = objectSeal{ + base = false, + method = "GET", + uri = "https://login.microsoftonline.com/".. app.msTenant .."/oauth2/v2.0/token", + hdrs = { + { "Content-Type", "application/x-www-form-urlencoded" }, + }, + reqBody = "" + .. "grant_type=password" + .."&resource=https://graph.microsoft.com" + .."&username=".. httpUrlEncode(app, app.msUser) .."" + .."&password=".. httpUrlEncode(app, app.msPass) .."", + rspProto = false, rspCode = false, rspPhrase = false, + rspHdrs = false, + rspBody = {}, + } + req.base = http:request{ + cls = req, + connectTimeoutMs = app.connectTimeoutMs, + host = app.msGraphHost, + port = app.msGraphPort, + method = req.method, + url = req.uri, + hdrs = req.hdrs, + onRspHdr = function( rsp, cls ) + cls.rspProto = rsp.proto + cls.rspCode = rsp.status + cls.rspPhrase = rsp.phrase + cls.rspHdrs = rsp.headers + end, + onRspChunk = function( buf, cls ) table.insert(cls.rspBody, buf) end, + onRspEnd = function( cls ) + local rspBody = table.concat(cls.rspBody) cls.rspBody = false + if cls.rspCode ~= 200 then + log:write("[ERROR] Request failed\n") + log:write("> ".. cls.method .." ".. cls.uri .."\n") + for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end + log:write("> \n") + log:write("> ".. cls.reqBody:gsub("\r?\n", "\n> ") .."\n") + log:write("< ".. cls.rspProto .." ".. cls.rspCode .." ".. cls.rspPhrase .."\n") + for _, h in ipairs(cls.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end + log:write("< \n") + log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n") + error("TODO_10aa11de804e733337e7c244298791c6") + end + log:write("< ".. cls.rspProto .." ".. cls.rspCode .." ".. cls.rspPhrase .."\n") + for _, h in ipairs(cls.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end + log:write("< \n") + log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n") + -- How to continue: + --local token = rsp.bodyJson.access_token + --local authHdr = { "Authorization", "Bearer ".. 
token, } + end, + } + --req.base:write(req.reqBody) + req.base:closeSnk() end @@ -97,6 +185,7 @@ function httpUrlEncode( app, str ) local byt = str:byte(iRd) if not byt then elseif byt == 0x2D -- dash + or byt == 0x2E -- dot or byt >= 0x30 and byt <= 0x39 -- 0-9 or byt >= 0x40 and byt <= 0x5A -- A-Z or byt >= 0x60 and byt <= 0x7A -- a-z @@ -134,34 +223,35 @@ end -- @return 1 - HTTP header key -- @return 2 - HTTP header value function getAuthHdr( app ) - return "Authorization", ("Bearer ".. app.msBearerToken) -end - - -function initHttpClient( app ) - local sck = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP) - sck:connect(app.msGraphHost, app.msGraphPort, app.connectTimeoutMs) - app.sck = sck + assert(app.msToken) + return "Authorization", ("Bearer ".. app.msToken) end function run( app ) - initHttpClient(app) - getMyProfileForDebugging(app) + app.http = newHttpClient{} + authorizeToMsGraphApi(app) + --getMyProfileForDebugging(app) end function main() local app = objectSeal{ isHelp = false, - msGraphHost = "127.0.0.1", - msGraphPort = 8080, - msTenant = "TODO_1700563786", - msAppId = "TODO_1700563821", + msGraphHost = "graph.microsoft.com", msGraphPort = 443, + --msGraphHost = "127.0.0.1", msGraphPort = 80, + -- TODO take this from a failed api call, which has this in the rsp headers. + msAuthUri = "https://login.microsoftonline.com/common/oauth2/authorize", + msTenant = "common", -- TODO configurable + -- TODO take this from a failed api call, which has this in the rsp headers. + msAppId = "00000003-0000-0000-c000-000000000000", msPerms = "offline_access user.read mail.read", - msBearerToken = "TODO_1700575589", + msToken = false, + msUser = false, + msPass = false, + http = false, connectTimeoutMs = 3000, - sck = false, + --sck = false, } if parseArgs(app) ~= 0 then os.exit(1) end if app.isHelp then printHelp() return end -- cgit v1.1 From bd0686736ca44c79ef65d89354e9c2c38487777e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 22 Nov 2023 16:57:26 +0100 Subject: ListEmlInbox.lua still crashes scriptlee --- src/main/lua/mshitteams/ListEmlInbox.lua | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua index 33bc877..fca7b9c 100644 --- a/src/main/lua/mshitteams/ListEmlInbox.lua +++ b/src/main/lua/mshitteams/ListEmlInbox.lua @@ -5,7 +5,9 @@ -- -- TODO: scriptlee 0.0.5-83-gdffa272 seems to SEGFAULT constantly here. No -- matter if we use socket or newHttpClient. --- +-- TODO: scriptlee 0.0.5-87-g946ebdc crashes through assertion: +-- Assertion failed: cls->msg.connect.sck->vt->unwrap != NULL, file src/windoof/c/io/AsyncIO.c, line 421 +-- local SL = require("scriptlee") local newHttpClient = SL.newHttpClient -- cgit v1.1 From a7fae4870f6796f17b044a54a795f1a048604d29 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 22 Nov 2023 22:54:09 +0100 Subject: ListEmlInbox.lua try continue --- src/main/lua/mshitteams/ListEmlInbox.lua | 119 ++++++++++++++++++++----------- 1 file changed, 79 insertions(+), 40 deletions(-) diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua index fca7b9c..e2fc817 100644 --- a/src/main/lua/mshitteams/ListEmlInbox.lua +++ b/src/main/lua/mshitteams/ListEmlInbox.lua @@ -124,7 +124,9 @@ function authorizeToMsGraphApi( app ) local req = objectSeal{ base = false, method = "GET", - uri = "https://login.microsoftonline.com/".. 
app.msTenant .."/oauth2/v2.0/token", + host = (app.proxyHost or app.msLoginHost), + port = (app.proxyPort or app.msLoginPort), + uri = false, hdrs = { { "Content-Type", "application/x-www-form-urlencoded" }, }, @@ -137,44 +139,62 @@ function authorizeToMsGraphApi( app ) rspHdrs = false, rspBody = {}, } - req.base = http:request{ - cls = req, - connectTimeoutMs = app.connectTimeoutMs, - host = app.msGraphHost, - port = app.msGraphPort, - method = req.method, - url = req.uri, - hdrs = req.hdrs, - onRspHdr = function( rsp, cls ) - cls.rspProto = rsp.proto - cls.rspCode = rsp.status - cls.rspPhrase = rsp.phrase - cls.rspHdrs = rsp.headers - end, - onRspChunk = function( buf, cls ) table.insert(cls.rspBody, buf) end, - onRspEnd = function( cls ) - local rspBody = table.concat(cls.rspBody) cls.rspBody = false - if cls.rspCode ~= 200 then - log:write("[ERROR] Request failed\n") - log:write("> ".. cls.method .." ".. cls.uri .."\n") - for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end - log:write("> \n") - log:write("> ".. cls.reqBody:gsub("\r?\n", "\n> ") .."\n") - log:write("< ".. cls.rspProto .." ".. cls.rspCode .." ".. cls.rspPhrase .."\n") - for _, h in ipairs(cls.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end + if app.proxyHost then + req.uri = "https://".. app.msLoginHost ..":".. app.msLoginPort + .."/".. app.msTenant .."/oauth2/v2.0/token" + else + req.uri = "/".. app.msTenant .."/oauth2/v2.0/token" + end + local ok, ex = xpcall(function() + req.base = http:request{ + cls = req, + connectTimeoutMs = app.connectTimeoutMs, + host = req.host, + port = req.port, + method = req.method, + url = req.uri, + hdrs = req.hdrs, + onRspHdr = function( rsp, req ) + req.rspProto = rsp.proto + req.rspCode = rsp.status + req.rspPhrase = rsp.phrase + req.rspHdrs = rsp.headers + end, + onRspChunk = function( buf, req ) table.insert(req.rspBody, buf) end, + onRspEnd = function( req ) + local rspBody = table.concat(req.rspBody) req.rspBody = false + if req.rspCode ~= 200 then + log:write("[ERROR] Request failed\n") + log:write("peer ".. req.host ..":".. req.port .."\n") + log:write("> ".. req.method .." ".. req.uri .."\n") + for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end + log:write("> \n") + log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n") + log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n") + for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end + log:write("< \n") + log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n") + error("TODO_10aa11de804e733337e7c244298791c6") + end + log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n") + for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end log:write("< \n") log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n") - error("TODO_10aa11de804e733337e7c244298791c6") - end - log:write("< ".. cls.rspProto .." ".. cls.rspCode .." ".. cls.rspPhrase .."\n") - for _, h in ipairs(cls.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end - log:write("< \n") - log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n") - -- How to continue: - --local token = rsp.bodyJson.access_token - --local authHdr = { "Authorization", "Bearer ".. token, } - end, - } + -- How to continue: + --local token = rsp.bodyJson.access_token + --local authHdr = { "Authorization", "Bearer ".. 
token, } + end, + } + end, debug.traceback) + if not ok then + log:write("[ERROR] Request failed 2\n") + log:write("peer ".. req.host ..":".. req.port .."\n") + log:write("> ".. req.method .." ".. req.uri .."\n") + for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end + log:write("> \n") + log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n") + error(ex) + end --req.base:write(req.reqBody) req.base:closeSnk() end @@ -238,12 +258,31 @@ end function main() + local loginHost, loginPort, graphHost, graphPort, proxyHost, proxyPort + local choice = 3 + if choice == 1 then + loginHost = "login.microsoftonline.com"; loginPort = 443 + graphHost = "graph.microsoft.com"; graphPort = 443 + proxyHost = "127.0.0.1"; proxyPort = 3128 + elseif choice == 2 then + loginHost = "127.0.0.1"; loginPort = 8081 + graphHost = "127.0.0.1"; graphPort = 8081 + proxyHost = false; proxyPort = false + elseif choice == 3 then + loginHost = "login.microsoftonline.com"; loginPort = 443 + graphHost = "127.0.0.1"; graphPort = 8081 + proxyHost = "127.0.0.1"; proxyPort = 3128 + elseif choice == 4 then + loginHost = "login.microsoftonline.com"; loginPort = 443 + graphHost = "graph.microsoft.com"; graphPort = 443 + proxyHost = false; proxyPort = false + else error("TODO_1700683244") end local app = objectSeal{ isHelp = false, - msGraphHost = "graph.microsoft.com", msGraphPort = 443, - --msGraphHost = "127.0.0.1", msGraphPort = 80, + msLoginHost = loginHost, msLoginPort = loginPort, + msGraphHost = graphHost, msGraphPort = graphPort, + proxyHost = proxyHost, proxyPort = proxyPort, -- TODO take this from a failed api call, which has this in the rsp headers. - msAuthUri = "https://login.microsoftonline.com/common/oauth2/authorize", msTenant = "common", -- TODO configurable -- TODO take this from a failed api call, which has this in the rsp headers. msAppId = "00000003-0000-0000-c000-000000000000", -- cgit v1.1 From 99b74d12fe3bc1a8acb4ac139e9b1401c234c8a9 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 23 Nov 2023 17:03:49 +0100 Subject: ListEmlInbox.lua tinker --- src/main/lua/mshitteams/ListEmlInbox.lua | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua index e2fc817..23b42aa 100644 --- a/src/main/lua/mshitteams/ListEmlInbox.lua +++ b/src/main/lua/mshitteams/ListEmlInbox.lua @@ -30,7 +30,20 @@ local inn, out, log = io.stdin, io.stdout, io.stderr function printHelp() out:write(" \n" - .." TODO write help page\n" + .." Experiments for M$ graph API.\n" + .." \n" + .." WARN: This tool is experimental! Do NOT use it!\n" + .." \n" + .." Options:\n" + .." \n" + .." --user \n" + .." M$ user.\n" + .." \n" + .." --pass \n" + .." M$ password. TODO get rid of this insecure idea.\n" + .." \n" + .." --appId \n" + .." AppId (aka client_id). See M$ doc about it.\n" .." 
\n") end @@ -38,7 +51,7 @@ end function parseArgs( app ) if #_ENV.arg == 0 then log:write("EINVAL: Args missing\n")return-1 end local iA = 0 - local isYolo = false + --local isYolo = false while true do iA = iA + 1 local arg = _ENV.arg[iA] if not arg then @@ -53,15 +66,20 @@ function parseArgs( app ) iA = iA + 1; arg = _ENV.arg[iA] if not arg then log:write("EINVAL: --pass needs value\n")return-1 end app.msPass = arg - elseif arg == "--yolo" then - isYolo = true + elseif arg == "--appId" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --appId needs value\n")return-1 end + app.msAppId = arg + --elseif arg == "--yolo" then + -- isYolo = true else log:write("EINVAL: ".. arg .."\n") return-1 end end - if not isYolo then log:write("EINVAL: This app is only for insiders\n")return-1 end if not app.msUser then log:write("EINVAL: --user missing\n") return-1 end if not app.msPass then log:write("EINVAL: --pass missing\n") return-1 end + if not app.msAppId then log:write("EINVAL: --appId missing\n")return-1 end + --if not isYolo then log:write("EINVAL: --yolo missing\n")return-1 end return 0 end @@ -234,7 +252,7 @@ function getAccessToken( app ) { "Content-Type", "application/x-www-form-urlencoded" }, } local body = "" - .."client_id=".. app.msAppId + .."client_id=".. assert(app.appId) .."&scope=".. scope .."&code=".. code .."&redirect_uri=".. redirUri @@ -285,7 +303,7 @@ function main() -- TODO take this from a failed api call, which has this in the rsp headers. msTenant = "common", -- TODO configurable -- TODO take this from a failed api call, which has this in the rsp headers. - msAppId = "00000003-0000-0000-c000-000000000000", + msAppId = false, msPerms = "offline_access user.read mail.read", msToken = false, msUser = false, -- cgit v1.1 From 542ceee1697c1f3185fde3992cc043545e931719 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 24 Nov 2023 16:20:53 +0100 Subject: update some notes. 
--- doc/note/links/links.txt | 1 + doc/note/openshift/dbg-mem-issues.txt | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index ab09e60..75baf2f 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -255,6 +255,7 @@ Links (Aka argument amplifiers) - [Houston OOM Killed](https://jira.post.ch/browse/SDCISA-10871) - [http cache disable](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/1/overview?commentId=287832) - [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) +- [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551) ## Common Performance - [going fast is about doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index a06ba4f..0435081 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -111,6 +111,18 @@ true `# Get made heap dump` \ && echo dload done \ && true +true `# Probe` \ + && logFile="readyness-probe.log" \ + && printf 'c; when ;rspCode; connectSec; trsfSec; totlSec; curlExit\n' | tee -a "${logFile:?}" \ + && while true; do true \ + && printf 'r;%s;%7d;%11.3f;%8.3f;%8.3f;%9d\n' \ + $(date +%Y-%m-%dT%H:%M:%S%z) \ + $(curl -sSw "%{http_code} %{time_connect} %{time_starttransfer} %{time_total}" "${houstonServerInfoUrl:?}" -o /dev/null || ex=$? && echo " $ex") \ + | tee -a "${logFile:?}" \ + && sleep $(expr 60 - $(date +%s) % 60) || break \ + ;done \ + && true + true \ && ONE="houston-prod-pmap-20231102-163425.txt" \ && TWO="houston-prod-pmap-20231103-074301.txt" \ -- cgit v1.1 From 7882b8b233516293a9e730b51214d50da7ee9008 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 29 Nov 2023 09:46:01 +0100 Subject: Add notes about windoof outlook, proto handler. Plus curl timing --- doc/note/curl/curl.txt | 13 +++++++++++++ doc/note/windoof/outlook-expor-eml.txt | 14 ++++++++++++++ doc/note/windoof/proto-handler.txt | 6 ++++++ 3 files changed, 33 insertions(+) create mode 100644 doc/note/curl/curl.txt create mode 100644 doc/note/windoof/outlook-expor-eml.txt create mode 100644 doc/note/windoof/proto-handler.txt diff --git a/doc/note/curl/curl.txt b/doc/note/curl/curl.txt new file mode 100644 index 0000000..b5a3556 --- /dev/null +++ b/doc/note/curl/curl.txt @@ -0,0 +1,13 @@ + + +## Timing + + curl example.com -w "\n\nconnect=%{time_connect}s, trsf=%{time_starttransfer}s, totl=%{time_total}s" + + + + +## Sources + +- [time trace](https://stackoverflow.com/a/18215566/4415884) + diff --git a/doc/note/windoof/outlook-expor-eml.txt b/doc/note/windoof/outlook-expor-eml.txt new file mode 100644 index 0000000..895779f --- /dev/null +++ b/doc/note/windoof/outlook-expor-eml.txt @@ -0,0 +1,14 @@ + +How to export email as EML file in stupid systems +================================================= + +- "https://outlook.office.com" +- Compose a new eMail. +- DragNDrop the email from the inbox to the draft (add attachment). +- Now "Download" that attachment. 
+ + +## Source + +- [How to save mail as eml](https://superuser.com/a/1474143/1123359) + diff --git a/doc/note/windoof/proto-handler.txt b/doc/note/windoof/proto-handler.txt new file mode 100644 index 0000000..b4d214c --- /dev/null +++ b/doc/note/windoof/proto-handler.txt @@ -0,0 +1,6 @@ + + + +## Source +- [how to fix windoof](https://superuser.com/a/1066769/1123359) + -- cgit v1.1 From ffb560bb42cfb5cc19dff0503bb539e46a40951f Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 29 Nov 2023 19:58:39 +0100 Subject: Disable more kludge via preflux patch --- src/main/patch/preflux/default.patch | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch index b3531b4..5b5f3f3 100644 --- a/src/main/patch/preflux/default.patch +++ b/src/main/patch/preflux/default.patch @@ -2,6 +2,34 @@ TODO describe (like in houston) +diff --git a/pom.xml b/pom.xml +--- a/pom.xml ++++ b/pom.xml +@@ -57,6 +57,24 @@ + + + 00.01.00.00 ++ true ++ true ++ true ++ false ++ true ++ true ++ true ++ false ++ true ++ true ++ true ++ true ++ true ++ true ++ true ++ true ++ true ++ true + + + diff --git a/preflux-web/pom.xml b/preflux-web/pom.xml index 752be702..8f91c053 100644 --- a/preflux-web/pom.xml -- cgit v1.1 From 039f73881ba3a6ccd0c9cf7cf4946664c35c9279 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 30 Nov 2023 17:08:16 +0100 Subject: Add a java class override. Intent is to prevent usage of unwanted logger impls during debugging. --- .../java/org/apache/logging/slf4j/Log4jLogger.java | 104 +++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 src/main/java/org/apache/logging/slf4j/Log4jLogger.java diff --git a/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java new file mode 100644 index 0000000..eb06c77 --- /dev/null +++ b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java @@ -0,0 +1,104 @@ +package org.apache.logging.slf4j; + +import org.apache.logging.log4j.spi.ExtendedLogger; +import org.slf4j.Marker; +import org.slf4j.event.Level; +import org.slf4j.spi.LocationAwareLogger; +import org.slf4j.spi.LoggingEventBuilder; + +import java.io.Serializable; + + +/** + *

FU** this fu***** damn sh** code that still tries to use log4j, no matter + * how strongly we tell it NOT to use it!

+ * + *

This class only exists to prevent services from starting if IDEA still + * missed the dependency changes in the pom and still tries to use the wrong + * logger impl. So that I can, once and for all, stop wasting my time + * waiting for logs which never arrive because the wrong logger is still used + * somewhere.
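+ *
+ * Implementation note: apart from the LocationAwareLogger#log overload
+ * (which deliberately throws), every override below just delegates to the
+ * slf4j-simple logger created in the constructor under the same name.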

+ */ +public class Log4jLogger implements LocationAwareLogger, Serializable { + + private final org.slf4j.Logger log; + + Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) { + this.log = new org.slf4j.simple.SimpleLoggerFactory().getLogger(name); + } + + @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) { + throw new UnsupportedOperationException(/*TODO*/"Not impl yet"); + } + + @Override public String getName() { return log.getName(); } + @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); } + @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); } + @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); } + @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); } + @Override public void trace(String s) { log.trace(s); } + @Override public void trace(String s, Object o) { log.trace(s, o); } + @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); } + @Override public void trace(String s, Object... objects) { log.trace(s, objects); } + @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); } + @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); } + @Override public LoggingEventBuilder atTrace() { return log.atTrace(); } + @Override public void trace(Marker marker, String s) { log.trace(marker, s); } + @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); } + @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); } + @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); } + @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); } + @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); } + @Override public void debug(String s) { log.debug(s); } + @Override public void debug(String s, Object o) { log.debug(s, o); } + @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); } + @Override public void debug(String s, Object... objects) { log.debug(s, objects); } + @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); } + @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); } + @Override public void debug(Marker marker, String s) { log.debug(marker, s); } + @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); } + @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); } + @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); } + @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); } + @Override public LoggingEventBuilder atDebug() { return log.atDebug(); } + @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); } + @Override public void info(String s) { log.info(s); } + @Override public void info(String s, Object o) { log.info(s, o); } + @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); } + @Override public void info(String s, Object... 
objects) { log.info(s, objects); } + @Override public void info(String s, Throwable throwable) { log.info(s, throwable); } + @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); } + @Override public void info(Marker marker, String s) { log.info(marker, s); } + @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); } + @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); } + @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); } + @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); } + @Override public LoggingEventBuilder atInfo() { return log.atInfo(); } + @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); } + @Override public void warn(String s) { log.warn(s); } + @Override public void warn(String s, Object o) { log.warn(s, o); } + @Override public void warn(String s, Object... objects) { log.warn(s, objects); } + @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); } + @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); } + @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); } + @Override public void warn(Marker marker, String s) { log.warn(marker, s); } + @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); } + @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); } + @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); } + @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); } + @Override public LoggingEventBuilder atWarn() { return log.atWarn(); } + @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); } + @Override public void error(String s) { log.error(s); } + @Override public void error(String s, Object o) { log.error(s, o); } + @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); } + @Override public void error(String s, Object... objects) { log.error(s, objects); } + @Override public void error(String s, Throwable throwable) { log.error(s, throwable); } + @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); } + @Override public void error(Marker marker, String s) { log.error(marker, s); } + @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); } + @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); } + @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); } + @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } + @Override public LoggingEventBuilder atError() { return log.atError(); } + +} -- cgit v1.1 From c9c1cc90fcf45d456dd7d532230012af17382ef8 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 1 Dec 2023 14:13:31 +0100 Subject: Add how to gen an ed25519 ssh key. 
Fix missing escaping in gateleen build --- doc/note/qemu/build-gateleen.txt | 4 ++-- doc/note/ssh/ssh-setup.txt | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/doc/note/qemu/build-gateleen.txt b/doc/note/qemu/build-gateleen.txt index 9666aa6..c29fcdc 100644 --- a/doc/note/qemu/build-gateleen.txt +++ b/doc/note/qemu/build-gateleen.txt @@ -39,8 +39,8 @@ true \ && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js >/dev/null \ && mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js \ && mvn install -PpublicRepos -DfailIfNoTests=false \ - -pl !gateleen-test,!gateleen-hook-js \ - -Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests \ + -pl '!gateleen-test,!gateleen-hook-js' \ + '-Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests' \ && mkdir "${WORKDIR:?}/classpath" \ && (cd gateleen-playground && mvn dependency:copy-dependencies \ -DexcludeScope=provided -DoutputDirectory="${WORKDIR:?}/classpath/.") \ diff --git a/doc/note/ssh/ssh-setup.txt b/doc/note/ssh/ssh-setup.txt index 6a2812b..9f3dde2 100644 --- a/doc/note/ssh/ssh-setup.txt +++ b/doc/note/ssh/ssh-setup.txt @@ -7,6 +7,7 @@ SSH Setup & Key Management - [Insane answer about key formats](https://stackoverflow.com/a/29707204/4415884) + ## Create New Ssh Key Create "path/to/key" and "path/to/key.pub" as a 2048 bit RSA with @@ -16,6 +17,13 @@ Create "path/to/key" and "path/to/key.pub" as a 2048 bit RSA with ssh-keygen -t rsa -b 2048 -f path/to/key -C "your comment" ``` +Create "path/to/key" and "path/to/key.pub" as an elliptic curve. + +```sh +ssh-keygen -t ed25519 -f path/to/key -C "your comment" +``` + + ## Change Passphrase @@ -26,6 +34,7 @@ ssh-keygen -p -f path/to/key NOTE: Just hitting enter when asked for the new one will remove the passphrase. 
+ ## Inspect keys Print public key hash: @@ -46,6 +55,7 @@ Print detailed DER file content: openssl x509 -in dumpcertfile -inform DER -text + ## Export pub key in misc formats ssh-keygen -e -f path/to/ssh2pub-or-privKey -m PKCS8 @@ -62,11 +72,13 @@ TODO to PKCS8: ssh-keygen -i -f path/to/key.pub -e -m PKCS8 > path/to/pub.pem + ## Remove obsolete entry from known_hosts ssh-keygen -f path/to/known_hosts -R "example.com" + ## TODO -e This option will read a private or public OpenSSH key file -- cgit v1.1 From dc9ac9f9af67bd40bd16b1d54dafecd66900be51 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 3 Dec 2023 00:29:53 +0100 Subject: (qemu, android) Try to build a HelloWorld --- doc/note/qemu/setup-android-env.txt | 70 +++++++++++++++++++++++++++---------- 1 file changed, 51 insertions(+), 19 deletions(-) diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index 97f8801..e93f2a0 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -1,38 +1,70 @@ # # Tools for Android development. # -# HINT: Migration to qemu not yet tested. +# HINT: Since JDK-8 is no longer available, we have to add clutter to +# apksigner command. Eg: +# apksigner -J-add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED \ +# -J-add-opens=java.base/sun.security.x509=ALL-UNNAMED \ +# -J-add-opens=java.base/sun.security.pkcs=ALL-UNNAMED +# +# Refs: +# - [Clean Android HelloWorld Tutorial](https://www.hanshq.net/command-line-android.html) +# - [List of available versions](https://dl.google.com/android/repository/repository-11.xml) # set -e ### Made for debian 10 (alias buster) true \ - && PKGS_TO_ADD="curl unzip openjdk-11-jdk-headless aapt apksigner zipalign" \ - && PKGS_TO_DEL="curl unzip" \ - && PKGINIT="apt-get update" \ - && PKGADD="apt-get install -y --no-install-recommends" \ - && PKGDEL="apt-get purge -y" \ - && PKGCLEAN="apt-get clean" \ - && PLATFORM_VERSION="22" \ - && BUILD_TOOLS_VERSION="22.0.1" \ + && PKGS_TO_ADD="curl unzip openjdk-17-jdk-headless aapt apksigner zipalign" \ + && SUDO=sudo \ + && PKGINIT="$SUDO apt update" \ + && PKGADD="$SUDO apt install -y --no-install-recommends" \ + && PKGCLEAN="$SUDO apt clean" \ + && PLATFORM_VERSION="24" \ + && BUILD_TOOLS_VERSION="34.0.0" \ && CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip" \ - \ - && export ANDROID_HOME="/usr/lib/android-sdk" \ - && export PATH="$PATH:/usr/lib/android-sdk/build-tools/debian:/usr/lib/android-sdk/cmdline-tools/latest/bin:/usr/lib/android-sdk/build-tools/$BUILD_TOOLS_VERSION" \ + && BUILD_TOOLS_URL="https://dl.google.com/android/repository/build-tools_r25-linux.zip" \ + && PLATFORM_URL="https://dl.google.com/android/repository/platform-${PLATFORM_VERSION:?}_r02.zip" \ + && BUILD_TOOLS_ZIP="$(basename "${BUILD_TOOLS_URL:?}")" \ \ && $PKGINIT \ && $PKGADD $PKGS_TO_ADD \ + && `# Cmdline tools` \ && (cd /var/tmp && curl -sSLO "${CMDLINETOOLS_URL:?}") \ && if test -x /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \ - && (cd /tmp && unzip /var/tmp/$(basename "$CMDLINETOOLS_URL") >/dev/null) \ - && mkdir /usr/lib/android-sdk/cmdline-tools \ - && mkdir /usr/lib/android-sdk/cmdline-tools/latest \ - && mv /tmp/cmdline-tools/* /usr/lib/android-sdk/cmdline-tools/latest/. 
\ - && yes | sdkmanager --install "platforms;android-${PLATFORM_VERSION:?}" "build-tools;${BUILD_TOOLS_VERSION:?}" \ + && (cd /tmp && unzip /var/tmp/$(basename "${CMDLINETOOLS_URL:?}") >/dev/null) \ + && $SUDO mkdir /usr/lib/android-sdk/cmdline-tools \ + && $SUDO mkdir /usr/lib/android-sdk/cmdline-tools/latest \ + && (cd /tmp/cmdline-tools && tar --owner=0 --group=0 -c bin lib source.properties) | (cd /usr/lib/android-sdk/cmdline-tools/latest && $SUDO tar x) \ + && `# Build Tools` \ + && (cd /var/tmp && curl -sSL "${BUILD_TOOLS_URL:?}" -o "${BUILD_TOOLS_ZIP:?}") \ + && mkdir "/tmp/${BUILD_TOOLS_ZIP%.*}" \ + && (cd "/tmp/${BUILD_TOOLS_ZIP%.*}" && unzip "/var/tmp/${BUILD_TOOLS_ZIP:?}") \ + && (cd "/tmp/${BUILD_TOOLS_ZIP%.*}" && tar --owner=0 --group=0 -c *) \ + | (cd /usr/lib/android-sdk/build-tools && $SUDO tar x) \ + && $SUDO find /usr/lib/android-sdk/build-tools -type d -exec chmod 755 {} + \ && `# Those for some reason are broken (wrong linker) so use the debian variant.` \ - && (cd "/usr/lib/android-sdk/build-tools/${BUILD_TOOLS_VERSION:?}" && rm aapt zipalign) \ - && $PKGDEL $PKGS_TO_DEL \ + && (cd "/usr/lib/android-sdk/build-tools/android*" && $SUDO rm aapt zipalign) \ + && `# Platform` \ + && (cd /var/tmp && curl -sSLO "${PLATFORM_URL:?}") \ + && if test -x /tmp/android*; then echo >&2 '[ERROR] /tmp/android* already exists'; false; fi \ + && (cd /tmp && unzip /var/tmp/$(basename "${PLATFORM_URL:?}") >/dev/null) \ + && $SUDO mkdir /usr/lib/android-sdk/platforms \ + && (cd /tmp && tar --owner=0 --group=0 -c android-*) | (cd /usr/lib/android-sdk/platforms && $SUDO tar x) \ + && $SUDO find /usr/lib/android-sdk/platforms/android-* -type d -exec chmod o+rx {} + \ + && $SUDO find /usr/lib/android-sdk/platforms/android-* -type f -exec chmod o+r {} + \ + && `# Environ` \ + && printf >>~/.profile '%s\n' \ + "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \ + "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android*):\$PATH\"" \ + "CLASSPATH=/usr/lib/android-sdk/build-tools/android-7.1.1/lib/dx.jar" \ + "LD_LIBRARY_PATH=/usr/lib/android-sdk/build-tools/android-7.1.1/lib64:\$LD_LIBRARY_PATH" \ + "export PATH" \ + "export CLASSPATH" \ + "export LD_LIBRARY_PATH" \ + && `# Cleanup` \ && $PKGCLEAN \ && rm -rf /tmp/* \ + && printf '\n Done :)\n\n' \ && true -- cgit v1.1 From cccbbbbc8f30319bcfea05f7da7c153f9e3f47ab Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 3 Dec 2023 01:08:43 +0100 Subject: (qemu android) Fix few mistakes. 
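With these fixes applied, a quick sanity check (a sketch, not part of the
setup itself; run from a fresh login shell so the new ~/.profile entries are
in effect):

    for t in aapt zipalign apksigner adb dx; do
        command -v "$t" || echo "MISSING: $t"
    done
    aapt version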
--- doc/note/qemu/setup-android-env.txt | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index e93f2a0..a45128a 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -26,6 +26,7 @@ true \ && BUILD_TOOLS_URL="https://dl.google.com/android/repository/build-tools_r25-linux.zip" \ && PLATFORM_URL="https://dl.google.com/android/repository/platform-${PLATFORM_VERSION:?}_r02.zip" \ && BUILD_TOOLS_ZIP="$(basename "${BUILD_TOOLS_URL:?}")" \ + && PLATFORM_ZIP="$(basename "${PLATFORM_URL:?}")" \ \ && $PKGINIT \ && $PKGADD $PKGS_TO_ADD \ @@ -44,27 +45,29 @@ true \ | (cd /usr/lib/android-sdk/build-tools && $SUDO tar x) \ && $SUDO find /usr/lib/android-sdk/build-tools -type d -exec chmod 755 {} + \ && `# Those for some reason are broken (wrong linker) so use the debian variant.` \ - && (cd "/usr/lib/android-sdk/build-tools/android*" && $SUDO rm aapt zipalign) \ + && (cd /usr/lib/android-sdk/build-tools/android* && $SUDO rm aapt zipalign) \ && `# Platform` \ - && (cd /var/tmp && curl -sSLO "${PLATFORM_URL:?}") \ + && (cd /var/tmp && curl -sSL "${PLATFORM_URL:?}" -o "${PLATFORM_ZIP:?}") \ && if test -x /tmp/android*; then echo >&2 '[ERROR] /tmp/android* already exists'; false; fi \ - && (cd /tmp && unzip /var/tmp/$(basename "${PLATFORM_URL:?}") >/dev/null) \ + && (cd /tmp && unzip "/var/tmp/${PLATFORM_ZIP:?}" >/dev/null) \ && $SUDO mkdir /usr/lib/android-sdk/platforms \ - && (cd /tmp && tar --owner=0 --group=0 -c android-*) | (cd /usr/lib/android-sdk/platforms && $SUDO tar x) \ + && (cd /tmp && mv android-* "android-${PLATFORM_VERSION:?}") \ + && (cd /tmp && tar --owner=0 --group=0 -c "android-${PLATFORM_VERSION:?}") \ + | (cd /usr/lib/android-sdk/platforms && $SUDO tar x) \ && $SUDO find /usr/lib/android-sdk/platforms/android-* -type d -exec chmod o+rx {} + \ && $SUDO find /usr/lib/android-sdk/platforms/android-* -type f -exec chmod o+r {} + \ && `# Environ` \ && printf >>~/.profile '%s\n' \ "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \ - "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android*):\$PATH\"" \ - "CLASSPATH=/usr/lib/android-sdk/build-tools/android-7.1.1/lib/dx.jar" \ - "LD_LIBRARY_PATH=/usr/lib/android-sdk/build-tools/android-7.1.1/lib64:\$LD_LIBRARY_PATH" \ + "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \ + "CLASSPATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib/dx.jar)" \ + "LD_LIBRARY_PATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib64):\$LD_LIBRARY_PATH" \ "export PATH" \ "export CLASSPATH" \ "export LD_LIBRARY_PATH" \ && `# Cleanup` \ && $PKGCLEAN \ - && rm -rf /tmp/* \ - && printf '\n Done :)\n\n' \ + && rm -rf /tmp/* 2>/dev/null || true \ + && printf '\n Done :)\n\n Logout and login to get your new environ from ~/.profile\n\n' \ && true -- cgit v1.1 From a4c2b1ccbe50655fdc4829ded9710226084d0e79 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 3 Dec 2023 03:00:23 +0100 Subject: (qemu android) Install ADB from debian repo. Give up adb from within VM. 
Just use host --- doc/note/qemu/qemu.txt | 2 ++ doc/note/qemu/setup-android-env.txt | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 1c98baa..e2d990a 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -51,6 +51,8 @@ qemu-system-x86_64 \ `# socket mcast shared network adapter` \ -netdev socket,id=n1,ipv6=off,mcast=230.0.0.1:1234 \ -device e1000,netdev=n1 \ + `# USB pass-through (WARN: untested!)` \ + -usb -device usb-host,id=myUsbQemuId,vendorid=0xFFFF,productid=0xFFFF \ `# Fix broken host systems` \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ `# Fix broken guest systems` \ diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index a45128a..828c502 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -15,7 +15,7 @@ set -e ### Made for debian 10 (alias buster) true \ - && PKGS_TO_ADD="curl unzip openjdk-17-jdk-headless aapt apksigner zipalign" \ + && PKGS_TO_ADD="curl unzip openjdk-17-jdk-headless aapt apksigner zipalign adb android-sdk-platform-tools-common" \ && SUDO=sudo \ && PKGINIT="$SUDO apt update" \ && PKGADD="$SUDO apt install -y --no-install-recommends" \ @@ -60,6 +60,7 @@ true \ && printf >>~/.profile '%s\n' \ "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \ "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \ + "PATH=\"/usr/lib/android-sdk/platform-tools:$PATH\"" \ "CLASSPATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib/dx.jar)" \ "LD_LIBRARY_PATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib64):\$LD_LIBRARY_PATH" \ "export PATH" \ -- cgit v1.1 From bac2374653a16b39204fbedfe1c0752529f303f8 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 7 Dec 2023 10:47:25 +0100 Subject: (qemu) Duplicate params for shitty systems --- doc/note/qemu/qemu.txt | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 1c98baa..939d706 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -51,6 +51,32 @@ qemu-system-x86_64 \ `# socket mcast shared network adapter` \ -netdev socket,id=n1,ipv6=off,mcast=230.0.0.1:1234 \ -device e1000,netdev=n1 \ + `# Fix broken guest systems` \ + -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ + `# Choose ONE of those for graphic output` \ + -nographic \ + -display sdl,grab-mod=rctrl \ + -display gtk,show-menubar=on \ + -display vnc=127.0.0.1:0,to=99 `#HINT: 0 is port 5900` \ + ; + +## Example Params for BROKEN host systems +qemu-system-x86_64 \ + -machine pc,accel=tcg -m size=2G -smp cores=$(nproc) \ + -monitor stdio \ + `# Drives & Boot.` \ + -boot order=dc \ + -cdrom "path/to/cd.iso" \ + -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ + `# Isolated Network plus host port/cmd reachable from guest` \ + -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \ + -device e1000,netdev=n1 \ + `# 10.0.2.x network with host redirect` \ + -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ + -device e1000,netdev=n0 \ + `# socket mcast shared network adapter` \ + -netdev socket,id=n1,ipv6=off,mcast=230.0.0.1:1234 \ + -device e1000,netdev=n1 \ `# Fix broken host systems` \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ `# Fix broken guest systems` \ -- cgit v1.1 From 419cecfd1a790bcaa8f650270e64efbd5e9cc13f Mon Sep 17 00:00:00 2001 
From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 13 Dec 2023 11:27:28 +0100 Subject: Add link about transport resilience. --- doc/note/links/links.txt | 4 ++++ doc/note/qemu/qemu.txt | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 75baf2f..7318837 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -426,6 +426,10 @@ Links (Aka argument amplifiers) - [Thought DB streams](https://wikit.post.ch/pages/viewpage.action?pageId=993270063&focusedCommentId=993272727#comment-993272727) - [How To Prevent Data Loss On A Non-Reliable Transport Channel](https://wikit.post.ch/x/4y_nQg) +## Resilience reliable end-to-end transport +- [How To Prevent Data Loss On A Non-Reliable Transport Channel](https://wikit.post.ch/x/4y_nQg) +- [Houston losing hook messages](https://jira.post.ch/browse/SDCISA-13346) + ## Bugs, Frameworks, Dependencies include them all - "https://medium.com/dinahmoe/escape-dependency-hell-b289de537403" - "https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/119/overview?commentId=46245" diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index a43f808..7e90598 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -62,7 +62,9 @@ qemu-system-x86_64 \ ## Broken systems likely need some of those too `# Fix broken hosts` \ - -machine pc,accel=tcg \ + `# TODO test accel=hax` \ + `# TODO test accel=whpx` \ + -machine pc,accel=hax \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ `# Fix broken guests` \ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ -- cgit v1.1 From 5ddd3ea32a360af171c43b3455ce6b730d997e2b Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 15 Dec 2023 18:14:24 +0100 Subject: Add some links --- doc/note/links/links.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 7318837..b8862f8 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -248,6 +248,8 @@ Links (Aka argument amplifiers) - [Update beim Fahrzeughersteller dauert zu lange](https://jira.post.ch/browse/SDCISA-9059) - [vortex too slow](https://jira.post.ch/browse/SDCISA-9990) - [2023-10-27 OOM nun auch auf Eagle](https://wikit.post.ch/x/c2U1Tw) +- [Fahrplanimports slow](https://jira.post.ch/browse/SDCISA-11528) +- [Jenkinsbuild too slow](https://jira.post.ch/browse/SDCISA-14313?focusedId=1914236&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1914236) ## Performance is not an issue ... 
- [Houston OOM 2023-06-27](https://wikit.post.ch/x/_Bv6Rw) @@ -424,11 +426,12 @@ Links (Aka argument amplifiers) ## Resilience limit upper bound - [Thought OOM](https://jira.post.ch/browse/SDCISA-10021) - [Thought DB streams](https://wikit.post.ch/pages/viewpage.action?pageId=993270063&focusedCommentId=993272727#comment-993272727) -- [How To Prevent Data Loss On A Non-Reliable Transport Channel](https://wikit.post.ch/x/4y_nQg) +- [in-memory buffer OOM](https://gitit.post.ch/projects/ISA/repos/nsync/pull-requests/55/overview?commentId=324715) ## Resilience reliable end-to-end transport - [How To Prevent Data Loss On A Non-Reliable Transport Channel](https://wikit.post.ch/x/4y_nQg) - [Houston losing hook messages](https://jira.post.ch/browse/SDCISA-13346) +- [Not interested in the fix](https://jira.post.ch/browse/SDCISA-11619?focusedId=1913186&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1913186) ## Bugs, Frameworks, Dependencies include them all - "https://medium.com/dinahmoe/escape-dependency-hell-b289de537403" -- cgit v1.1 From f93cc067c38ed7a90d1866810f6ba068ee4f533d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 21 Dec 2023 13:06:57 +0100 Subject: add 'dx' to path in android qemu --- doc/note/qemu/setup-android-env.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index 828c502..5e8c0a0 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -61,6 +61,7 @@ true \ "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \ "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \ "PATH=\"/usr/lib/android-sdk/platform-tools:$PATH\"" \ + "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):$PATH\"" \ "CLASSPATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib/dx.jar)" \ "LD_LIBRARY_PATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib64):\$LD_LIBRARY_PATH" \ "export PATH" \ -- cgit v1.1 From 9aba36876a4303887b63d16333d3bbb28bf75cfd Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 21 Dec 2023 13:44:05 +0100 Subject: (android qemu) Fix broken path. --- doc/note/qemu/setup-android-env.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index 5e8c0a0..1589e6f 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -60,8 +60,8 @@ true \ && printf >>~/.profile '%s\n' \ "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \ "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \ - "PATH=\"/usr/lib/android-sdk/platform-tools:$PATH\"" \ - "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):$PATH\"" \ + "PATH=\"/usr/lib/android-sdk/platform-tools:\$PATH\"" \ + "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \ "CLASSPATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib/dx.jar)" \ "LD_LIBRARY_PATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib64):\$LD_LIBRARY_PATH" \ "export PATH" \ -- cgit v1.1 From 650118d890416c048f37c1be75f184f470ac7a40 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 22 Dec 2023 23:32:44 +0100 Subject: (windoof) note about how to disable disk bloating through updates. 
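The registry snippet in this note can be applied by saving it as e.g.
kill-auto-update.reg (filename is arbitrary) and importing it from an
elevated prompt (untested sketch):

    reg import kill-auto-update.reg

Or equivalently, without a file:

    reg add "HKLM\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU" /v NoAutoUpdate /t REG_DWORD /d 1 /f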
--- doc/note/windoof/kill-auto-update.txt | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 doc/note/windoof/kill-auto-update.txt diff --git a/doc/note/windoof/kill-auto-update.txt b/doc/note/windoof/kill-auto-update.txt new file mode 100644 index 0000000..3ccb20a --- /dev/null +++ b/doc/note/windoof/kill-auto-update.txt @@ -0,0 +1,18 @@ + +Stop Automatic windoof updates +============================== + +For example in Virtual machines running in isolated environments +shouting auto updates are nothing than annoying. Further, they +continuously bloat VM images for no reason and make snapshotting +unneccesarily tedious. + + + +## Stop annoying updates + +Windows Registry Editor Version 5.00 +[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU] +"NoAutoUpdate"=dword:00000001 + + -- cgit v1.1 From 1bc5117071096984759b64d056199f35d4444a89 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 29 Dec 2023 01:31:12 +0100 Subject: (android qemu) Add NDK to setup --- doc/note/qemu/setup-android-env.txt | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt index 1589e6f..0008c16 100644 --- a/doc/note/qemu/setup-android-env.txt +++ b/doc/note/qemu/setup-android-env.txt @@ -25,8 +25,10 @@ true \ && CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip" \ && BUILD_TOOLS_URL="https://dl.google.com/android/repository/build-tools_r25-linux.zip" \ && PLATFORM_URL="https://dl.google.com/android/repository/platform-${PLATFORM_VERSION:?}_r02.zip" \ + && NDK_URL="https://dl.google.com/android/repository/android-ndk-r26b-linux.zip" \ && BUILD_TOOLS_ZIP="$(basename "${BUILD_TOOLS_URL:?}")" \ && PLATFORM_ZIP="$(basename "${PLATFORM_URL:?}")" \ + && NDK_ZIP="$(basename "${NDK_URL:?}")" \ \ && $PKGINIT \ && $PKGADD $PKGS_TO_ADD \ @@ -67,6 +69,14 @@ true \ "export PATH" \ "export CLASSPATH" \ "export LD_LIBRARY_PATH" \ + && `# NDK` \ + && (cd /var/tmp && curl -sSL "${NDK_URL:?}" -o "${NDK_ZIP:?}") \ + && mkdir "/tmp/${NDK_ZIP%.*}" \ + && (cd "/tmp/${NDK_ZIP%.*}" && unzip "/var/tmp/${NDK_ZIP:?}") \ + && `# TODO may worth throw away some of that garbage before moving it into place` \ + && (cd "/tmp/${NDK_ZIP%.*}" && tar --owner=0 --group=0 -c android-ndk-*) \ + | (cd "/usr/lib" && $SUDO tar x) \ + && $SUDO ln -s /usr/lib/android-ndk-* "/usr/lib/android-ndk" \ && `# Cleanup` \ && $PKGCLEAN \ && rm -rf /tmp/* 2>/dev/null || true \ -- cgit v1.1 From 73fc83487c5984dc6d6edc68cd7e699484af1beb Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 31 Dec 2023 17:30:41 +0100 Subject: WTF?!? How terribly ugly is that language?!? --- src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh | 25 ++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh diff --git a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh new file mode 100644 index 0000000..3fba3a6 --- /dev/null +++ b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh @@ -0,0 +1,25 @@ +#!/bin/sh +# +# Once used to batch process some PDFs. This is NOT functional. It is only here +# for reference purposes. +# + +true \ + && pdftk \ + A=scan.pdf \ + B=scan0001.pdf \ + shuffle A Bend-1 \ + output all-pages.pdf \ + && pdftk \ + all-pages.pdf \ + cat 1 3 5 7 8 9 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \ + output pages-with-content.pdf \ + && if test ! 
-d pages-with-content-img; then true \ + && mkdir -p pages-with-content-img \ + && pdfimages -all pages-with-content.pdf pages-with-content-img/img \ + ;fi \ + && X=pZVNc9owEIbv+RVbn+R2BBZJP5i0B0+jJHQoZBwnd9UWQqktq0iQ9N9XNrKBdiaHSozAK/aR1vu+Y6OK27eAzsANJJVasJobQJU0tltrRzS2tR7XXCozNgVTZKyZ4AY/S7vGRaMsVxbLWozdxEmSjJ60iP6XJkH0JIg+D6Ivguj3QfSHIPpjEP0piJ6G0CTIayTIayTIayTIayTIayTIayTIayTIayTIa+TIa3F88siDxMfN1t4pMYSOG67LDXtmP6pDrixfABMfFMrCRXvdTlTylVQckF1LMzPftsam19vip1TiitVq3jQaHO1RvZEOPsSG2zfQF4aUXbf/9QsmPsly9aGCbQCtZMXxk+YCVw0rgQwb+N/4hBvuZQ8LWWvXJ9c7LLjFrLByx3HFfvNNe8TAdnk9iyu+45U57HU7u8+XN1n6HT+m8wfqVUpGk/ZrCnCdzu8pwKUrSW/71wsZJXAyLkGwumbHdJcz0E6iHj8ua1++E33HN050VfIXXnYN+rpcPNIsx1ez/JZmeLFc0GHtLp3TPKf4hi5oluYUzif+pDx7oBBFx13z5kDGOsUEZlpzVQ6NjnBbx0grEcUnyv6VHxm2cyG0ioFtIPL79lCnpNsFuzzeCuluYeixL6G3KUzd9J9/m1Fy93bnnYLesStAn790dnJ+jV/1J3rX5ZG9+J2rX8nuTt+f/WsrbRvHZ38AVrSGVg== \ + && echo $X|base64 -d|inflate| gimp -nidfsc -b - \ + && echo DONE \ + + -- cgit v1.1 From 7d4fdc5c2112560bec5ce40b334bfac7fa2b6d4b Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 31 Dec 2023 17:45:05 +0100 Subject: Fix double-call in loop --- src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh index 3fba3a6..bc32f99 100644 --- a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh +++ b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh @@ -17,8 +17,8 @@ true \ && if test ! -d pages-with-content-img; then true \ && mkdir -p pages-with-content-img \ && pdfimages -all pages-with-content.pdf pages-with-content-img/img \ - ;fi \ - && X=pZVNc9owEIbv+RVbn+R2BBZJP5i0B0+jJHQoZBwnd9UWQqktq0iQ9N9XNrKBdiaHSozAK/aR1vu+Y6OK27eAzsANJJVasJobQJU0tltrRzS2tR7XXCozNgVTZKyZ4AY/S7vGRaMsVxbLWozdxEmSjJ60iP6XJkH0JIg+D6Ivguj3QfSHIPpjEP0piJ6G0CTIayTIayTIayTIayTIayTIayTIayTIayTIa+TIa3F88siDxMfN1t4pMYSOG67LDXtmP6pDrixfABMfFMrCRXvdTlTylVQckF1LMzPftsam19vip1TiitVq3jQaHO1RvZEOPsSG2zfQF4aUXbf/9QsmPsly9aGCbQCtZMXxk+YCVw0rgQwb+N/4hBvuZQ8LWWvXJ9c7LLjFrLByx3HFfvNNe8TAdnk9iyu+45U57HU7u8+XN1n6HT+m8wfqVUpGk/ZrCnCdzu8pwKUrSW/71wsZJXAyLkGwumbHdJcz0E6iHj8ua1++E33HN050VfIXXnYN+rpcPNIsx1ez/JZmeLFc0GHtLp3TPKf4hi5oluYUzif+pDx7oBBFx13z5kDGOsUEZlpzVQ6NjnBbx0grEcUnyv6VHxm2cyG0ioFtIPL79lCnpNsFuzzeCuluYeixL6G3KUzd9J9/m1Fy93bnnYLesStAn790dnJ+jV/1J3rX5ZG9+J2rX8nuTt+f/WsrbRvHZ38AVrSGVg== \ + ;fi; true \ + && X=pZZNc9owEIbv+RVbn+RmBBZp0zLpxdM4CR0KGeLkrtiLUGrLLhIk/feVjS2HTCeHSoyAtfdZ7cc7BlKg+QjkBOwiUqkFL1EDKaQ27bVmBWNT1uMSpdJjnXHFxjUXqOmzNBuaVcqgMlSWYmw3jaJo9FSL4H9p5kVPvOgzL/qTF/3Ziz73or940V+96KkPzby0xry0xry0xry0xry0xry0xry0xry0xry0xl5pLQyPHnkQdXa1M7dKONNy7nu+5c/8sRh8Zf4ClHVGpgxM2jvNJjmupUIgZiP1TP/YaRNf7bJfUolLXqp5VdVg8T7QGsi3xgYbJQTyiEKqrkii0XyAPk2izKZ17B/VXR3Oz+ZLMr4FspYF0qcaBS0qngNzIbrP8A3pqjvgQpa17ZztJhVoKM+M3CMt+B/cNoe8olvPnqYF7rHQQ7Sb2V26vF7FP+lDPL9P3OSi0aR5mwJcxfO7BODCJlbvhh8dNorgaF2A4GXJjyO0Xi6CHd4Q4jjBQylWEnvcWkmoHF8wb9v1fbl4SFYpvZylN8mKLpaLxF27jedJmib0OlkkqzhN4GzSnZau7hMIguMeduIh2mztoCmva1S5a3xAm0xGtRLB0L3aepq3RKD53prQzBBMBUEXecDa6dpI1HpiM1xbiOt5l0YvZZja3b3+1ZQc7X8AbKfa331HteS01R/rcgmd4t9h2mMPR/7eSdPY4clfwj6IxA== \ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \ && echo DONE \ -- cgit v1.1 From 993f54e0d0a6c04a581256d3ff9d216418187336 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 31 Dec 2023 17:50:46 +0100 Subject: (gimp script) move new files to separate dir --- src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git 
a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh index bc32f99..68caf52 100644 --- a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh +++ b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh @@ -14,12 +14,11 @@ true \ all-pages.pdf \ cat 1 3 5 7 8 9 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \ output pages-with-content.pdf \ - && if test ! -d pages-with-content-img; then true \ - && mkdir -p pages-with-content-img \ - && pdfimages -all pages-with-content.pdf pages-with-content-img/img \ - ;fi; true \ + && mkdir pages-with-content-img \ + && pdfimages -all pages-with-content.pdf pages-with-content-img/img \ && X=pZZNc9owEIbv+RVbn+RmBBZp0zLpxdM4CR0KGeLkrtiLUGrLLhIk/feVjS2HTCeHSoyAtfdZ7cc7BlKg+QjkBOwiUqkFL1EDKaQ27bVmBWNT1uMSpdJjnXHFxjUXqOmzNBuaVcqgMlSWYmw3jaJo9FSL4H9p5kVPvOgzL/qTF/3Ziz73or940V+96KkPzby0xry0xry0xry0xry0xry0xry0xry0xry0xl5pLQyPHnkQdXa1M7dKONNy7nu+5c/8sRh8Zf4ClHVGpgxM2jvNJjmupUIgZiP1TP/YaRNf7bJfUolLXqp5VdVg8T7QGsi3xgYbJQTyiEKqrkii0XyAPk2izKZ17B/VXR3Oz+ZLMr4FspYF0qcaBS0qngNzIbrP8A3pqjvgQpa17ZztJhVoKM+M3CMt+B/cNoe8olvPnqYF7rHQQ7Sb2V26vF7FP+lDPL9P3OSi0aR5mwJcxfO7BODCJlbvhh8dNorgaF2A4GXJjyO0Xi6CHd4Q4jjBQylWEnvcWkmoHF8wb9v1fbl4SFYpvZylN8mKLpaLxF27jedJmib0OlkkqzhN4GzSnZau7hMIguMeduIh2mztoCmva1S5a3xAm0xGtRLB0L3aepq3RKD53prQzBBMBUEXecDa6dpI1HpiM1xbiOt5l0YvZZja3b3+1ZQc7X8AbKfa331HteS01R/rcgmd4t9h2mMPR/7eSdPY4clfwj6IxA== \ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \ - && echo DONE \ + && mkdir pages-image-adjusted \ + && mv -t pages-image-adjusted/. pages-with-content-img/*gimp.png \ -- cgit v1.1 From cb082556779bcb1999fafcd782ca10b4afd29ec1 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 31 Dec 2023 19:57:53 +0100 Subject: (gimp script) add another helper --- src/main/gimp/nek2023-scan2/arrange-pdf | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 src/main/gimp/nek2023-scan2/arrange-pdf diff --git a/src/main/gimp/nek2023-scan2/arrange-pdf b/src/main/gimp/nek2023-scan2/arrange-pdf new file mode 100644 index 0000000..e2d2c7b --- /dev/null +++ b/src/main/gimp/nek2023-scan2/arrange-pdf @@ -0,0 +1,32 @@ +#!/bin/sh +# +# Once used to batch process some PDFs. This is NOT functional. It is only here +# for reference purposes. 
+# +# scan.pdf +# scan0001.pdf +# scan0002.pdf +# scan0003.pdf +# scan0004.pdf +# scan0005.pdf +# + +true \ + && mkdir scan scan0001 scan0002 scan0003 scan0004 scan0005 \ + && pdfimages -all scan.pdf scan-tmp1/scan-img \ + && pdfimages -all scan0001.pdf scan0001-tmp1/scan0001-img \ + && pdfimages -all scan0002.pdf scan0002-tmp1/scan0002-img \ + && pdfimages -all scan0003.pdf scan0003-tmp1/scan0003-img \ + && pdfimages -all scan0004.pdf scan0004-tmp1/scan0004-img \ + && pdfimages -all scan0005.pdf scan0005-tmp1/scan0005-img \ + && X=nVXBcpswEL3nK7acRDuywUkPnvTCNErijmtnHJK7ArKiFAS1ZDf9+wosRB08GTligF28b/X27Y6MCqY/AzoDs5CQckFLpgAVQun2W7OCsS7rccmEVGOVUTlpn2NRchxF0eil5oFfbHxC7MQn1mwfOwOfQOgY0IvdMeCHqZ57AnvjxBoHQN8aB0DfGgfAD9d44Qk8d8aJ4gyAvuIMgL7iXDjjRKoDoC/VAdCX6ldnDKmG4cFxAZH1q62+k9y5BufsfEP/0KeijxX5K+DYOpnUEE8bp7lRztZCMkD6WaiZ+rFVOrneZr+E5Fe0lPOqqsHAu0RrQN8aH0yWENAT40La+pBi+hN0NJHUz21gd8zZOlyc4YsyugG0FgXDLzXjuKhoDrFLYd/hG6Srbg/noqyNapQzzJnGNNNix3BB/7JNs8l/6DayQ+OC7Vih+my3s/t0ebNKfuLHZP5AXNOi0aR5TAGuk/k9Abg0xOptf2DHowgO1iVwWpb0MEMb5TKY5vUpDgnuS8kquWMbjYXM2SvLW7m+LxePZJXiq1l6S1Z4sVwQ9+0umZM0JfiGLMgqSQmcT+xu6eqBQBAcamiHBym9MY3GtK6ZzJ3wAW6YjGrJg1692kTqt4hA0Z1xoekh6AoCm7mHtd01mbCJZE1zTSFOc0ujG2WYmttex0TJmfn/ZG1Xu1/fmVr0pZ2/2HIJ3cS/g2m33W/5eyt044dn/wA4gyeC \ + && echo $X|base64 -d|inflate| gimp -nidfsc -b - \ + && mkdir scan-out scan0001-out scan0002-out scan0003-out scan0004-out scan0005-out \ + && mv -t scan-out/. scan-tmp1/*-gimp.png \ + && mv -t scan0001-out/. scan0001-tmp1/*-gimp.png \ + && mv -t scan0002-out/. scan0002-tmp1/*-gimp.png \ + && mv -t scan0003-out/. scan0003-tmp1/*-gimp.png \ + && mv -t scan0004-out/. scan0004-tmp1/*-gimp.png \ + && mv -t scan0005-out/. scan0005-tmp1/*-gimp.png \ + + -- cgit v1.1 From 797999974e4a56effb7db001038e4bd975446180 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 1 Jan 2024 17:45:48 +0100 Subject: (bkup) Add some excludes --- src/main/shell/BackupByRsync/backup.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh index 938b807..b3e090a 100755 --- a/src/main/shell/BackupByRsync/backup.sh +++ b/src/main/shell/BackupByRsync/backup.sh @@ -87,13 +87,16 @@ run () { --exclude="/.lesshst" \ --exclude="/.local/share" \ --exclude="/.m2/repository" \ + --exclude="/mnt" \ --exclude="/.mozilla/firefox" \ --exclude="/.NERDTreeBookmarks" \ --exclude="/.recently-used" \ + --exclude="/.sh_history" \ --exclude="/.sqlite_history" \ --exclude="/.squirrel-sql" \ --exclude="/.viking-maps" \ --exclude="/.viminfo" \ + --exclude="/.viminfo.tmp" \ --exclude="/.Xauthority" \ --exclude="/.xsession-errors" \ --exclude="/.xsession-errors.old" \ -- cgit v1.1 From 4571cc4ec1acec3dcc83c8b7122b18599aa8d480 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 5 Jan 2024 20:30:52 +0100 Subject: Add link about java try-with-resources. Update log digger. 
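The gist of the linked try-with-resources article, as a minimal sketch of my
own (not taken from the article): when both the try body and close() throw,
the body's exception propagates and close()'s exception is attached to it as
a suppressed exception rather than being lost.

    public class Twr {
        public static void main(String[] args) {
            try (AutoCloseable r = () -> { throw new Exception("from close"); }) {
                throw new Exception("from body");
            } catch (Exception e) {
                System.out.println(e.getMessage());                     // "from body"
                System.out.println(e.getSuppressed()[0].getMessage()); // "from close"
            }
        }
    }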
--- doc/note/links/links.txt | 3 +++ src/main/lua/paisa-logs/DigHoustonLogs.lua | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index b8862f8..48cdca1 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -558,3 +558,6 @@ Links (Aka argument amplifiers) ## Angular is terrible - [Why angular sucks](https://medium.com/dirtyjs/why-angular-2-4-5-6-sucks-afb36567ad68) +## java try-with-resources behavior +[Exception Scenarios for Java's try-with-resources](https://dev.to/moaxcp/exception-scenarios-for-java-s-try-with-resources-63m) + diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index c9b510b..85c21c3 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -36,7 +36,7 @@ function loadFilters( that ) that.filters = { { action = "drop", beforeDate = "2023-10-18 03:00:00.000", }, - { action = "drop", afterDate = "2023-10-18 15:00:00.000", }, + { action = "drop", afterDate = "2024-01-31 23:59:59.999", }, { action = "drop", level = "TRACE" }, { action = "drop", level = "DEBUG" }, @@ -68,7 +68,7 @@ function loadFilters( that ) }, -- Reported: SDCISA-13717 - -- Seen: 2023-10-18 prod + -- Seen: 2024-01-05 prod, 2023-10-18 prod { action = "drop", file = "LocalHttpServerResponse", level = "ERROR", msgPattern = "^non%-proper HttpServerResponse occured\r?\n" .."java.lang.IllegalStateException:" @@ -76,7 +76,7 @@ function loadFilters( that ) .." HTTP chunked encoding.", }, -- Reported: - -- Seen: 2023-10-18 prod + -- Seen: 2024-01-05 prod, 2023-10-18 prod { action = "drop", file = "ContextImpl", level = "ERROR", msgPattern = "Unhandled exception\n" .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending" @@ -163,7 +163,7 @@ function loadFilters( that ) .." __vertx.reply.[0-9]+, repliedAddress: nsync.reregister.sync/slarti.vehicle.setup.sync.[0-9]+", }, - -- Seen: 2023-10-18 prod + -- Seen: 2024-01-05 prod, 2023-10-18 prod -- Reported: { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\n" .."io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. 
address: __vertx.reply.[0-9]+, repliedAddress: nsync.register.sync" }, -- cgit v1.1 From 802e15db91e05c2cb34c49f3bea018437593d71f Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 7 Jan 2024 01:11:42 +0100 Subject: (bkup) Add excludes from tux-book --- src/main/shell/BackupByRsync/backup.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh index b3e090a..16c1aa2 100755 --- a/src/main/shell/BackupByRsync/backup.sh +++ b/src/main/shell/BackupByRsync/backup.sh @@ -81,6 +81,7 @@ run () { --exclude="/.config/VirtualBox/vbox-ssl-cacertificate.crt" \ --exclude="/.config/VirtualBox/VBoxSVC.log*" \ --exclude="/.config/VirtualBox/xpti.dat" \ + --exclude="/.eclipse" \ --exclude="/.gdb_history" \ --exclude="/.git-credentials" \ --exclude="/.gmrun_history" \ @@ -91,6 +92,7 @@ run () { --exclude="/.mozilla/firefox" \ --exclude="/.NERDTreeBookmarks" \ --exclude="/.recently-used" \ + --exclude="/.recoll" \ --exclude="/.sh_history" \ --exclude="/.sqlite_history" \ --exclude="/.squirrel-sql" \ @@ -107,7 +109,9 @@ run () { --exclude="/projects/forks" \ --exclude="/tmp" \ --exclude="/virtualbox-*" \ + --exclude="/VirtualBox VMs" \ --exclude="/vm-qemu" \ + --exclude="/vm-share" \ --exclude="/vmshare" \ --exclude="cee-misc-lib/external" \ --exclude="cee-misc-lib/tmp" \ -- cgit v1.1 From da8a1ea36fd9a81ce1f037fed675ccbc4f289a14 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 9 Jan 2024 17:11:24 +0100 Subject: (links) Add some links. --- doc/note/links/links.txt | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 48cdca1..1d56d08 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -233,6 +233,7 @@ Links (Aka argument amplifiers) - [thor-DasUnheilNaht](https://gitit.post.ch/projects/ISA/repos/fis-masterdata-api/pull-requests/17/overview?commentId=227703) - [thor-DerBlizHatEingeschlagen](https://gitit.post.ch/projects/ISA/repos/fis-masterdata-api/pull-requests/18/overview) - [PaISA api new enum values](https://gitit.post.ch/projects/ISA/repos/fis-control-api/pull-requests/14/overview?commentId=296012) +- [Keep APIs scope narrow as possible](https://gitit.post.ch/projects/ISA/repos/timetable-reservation-api/pull-requests/12/overview?commentId=327819) ## Performance DOES matter - "https://github.com/swisspush/gateleen/pull/456#discussion_r844865066" @@ -259,6 +260,9 @@ Links (Aka argument amplifiers) - [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) - [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551) +## Errorhandling is not needed ... 
+- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526) + ## Common Performance - [going fast is about doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY) - [CppCon Tuning Benchmarks clang CPUs Compilers" ](https://m.youtube.com/watch?v=nXaxk27zwlk) @@ -561,3 +565,5 @@ Links (Aka argument amplifiers) ## java try-with-resources behavior [Exception Scenarios for Java's try-with-resources](https://dev.to/moaxcp/exception-scenarios-for-java-s-try-with-resources-63m) +[About TLS in isa](https://jira.post.ch/browse/SDCISA-14330?focusedId=1925001&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925001) + -- cgit v1.1 From 35166b56caa983087d6feb251ebb7c985544eb10 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 9 Jan 2024 17:12:01 +0100 Subject: Add helper script to kludge a CHANGELOG together from git log --- src/main/lua/git/GitflowChangelogGen.lua | 176 +++++++++++++++++++++++++++++++ 1 file changed, 176 insertions(+) create mode 100644 src/main/lua/git/GitflowChangelogGen.lua diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua new file mode 100644 index 0000000..e4ec6c6 --- /dev/null +++ b/src/main/lua/git/GitflowChangelogGen.lua @@ -0,0 +1,176 @@ + +local log = io.stderr +local main + + +function printHelp() + io.stdout:write(" \n" + .." Helper to extract essential data from a gitflog log which potentially\n" + .." is useful to write a CHANGELOG from.\n" + .." \n" + .." Options:\n" + .." \n" + .." --since \n" + .." Ignore commits with this ISO date and older.\n" + .." \n" + .." --remote \n" + .." Name of the git remote to use. Defaults to 'upstream'.\n" + .." \n" + ) +end + + +function parseArgs( app ) + local iA = 0 + while true do iA = iA + 1 + local arg = _ENV.arg[iA] + if not arg then + break + elseif arg == "--since" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --since needs value\n")end + app.since = arg + elseif arg == "--remote" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --remote needs value\n")end + app.remoteName = arg + elseif arg == "--help" then + app.isHelp = true; return 0 + end + end + if not app.since then log:write("EINVAL: --since missing\n")return end + if not app.remoteName then app.remoteName = "upstream" end + return 0 +end + + +function readCommitHdr( app ) + --log:write("[DEBUG] parse hdr from '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+256) .."...'\n") + local f, t = app.fullHistory:find("^" + .."commit ........................................[^\n]*\n" + .."Merge: [0-9a-z]+ [0-9a-z]+\n" + .."Author: [^\n]+\n" + .."Date: [^\n]+\n" + .."\n" + , app.fullHistoryRdBeg) + if not f then f, t = app.fullHistory:find("^" + .."commit ........................................[^\n]*\n" + .."Author: [^\n]+\n" + .."Date: [^\n]+\n" + .."\n" + , app.fullHistoryRdBeg) end + if not f then + assert(app.fullHistory:len() == app.fullHistoryRdBeg-1, app.fullHistory:len()..", "..app.fullHistoryRdBeg) + app.parseFn = false + return + end + app.commitHdr = assert(app.fullHistory:sub(f, t-1)) + --log:write("hdrBeginsWith '"..(app.commitHdr:sub(1, 32)).."...'\n") + app.fullHistoryRdBeg = t + 1 + --log:write("hdr parsed. rdCursr now points to '".. 
app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n") + app.parseFn = assert(readCommitMsg) +end + + +function readCommitMsg( app ) + local idxOfC = app.fullHistoryRdBeg + local chrPrev = false + while true do idxOfC = idxOfC + 1 + local chr = app.fullHistory:byte(idxOfC) + --log:write("CHR '"..tostring(app.fullHistory:sub(idxOfC, idxOfC)).."'\n") + if (chr == 0x63) and chrPrev == 0x0A then + idxOfC = idxOfC - 1 + break -- LF followed by 'c' (aka 'commit') found + elseif not chr then + idxOfC = idxOfC - 1 + break + else + chrPrev = assert(chr) + end + end + local mtch = app.fullHistory:sub(app.fullHistoryRdBeg, idxOfC - 1) + assert(mtch) + while mtch:byte(mtch:len()) == 0x0A do mtch = mtch:sub(1, -2) end + mtch = mtch:gsub("\n ", "\n"):gsub("^ ", "") + app.commitMsg = mtch + app.fullHistoryRdBeg = idxOfC + 1 + app.parseFn = readCommitHdr + --log:write("msg parsed. rdCursr now points to '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n") + table.insert(app.commits, { + hdr = assert(app.commitHdr), + msg = assert(app.commitMsg), + }) +end + + +function run( app ) + local snk = io.stdout + -- Collect input + local git = "git log --date-order --first-parent --decorate --since \"".. app.since.."\"" + .." \"".. app.remoteName .."/master\"" + .." \"".. app.remoteName .."/develop\"" + log:write("[DEBUG] ".. git .."\n") + local git = io.popen(git) + while true do + local buf = git:read(1<<16) + if not buf then break end + --io.stdout:write(buf) + table.insert(app.fullHistory, buf) + end + -- Parse raw commits + app.fullHistory = table.concat(app.fullHistory) + app.parseFn = assert(readCommitHdr) + while app.parseFn do app.parseFn(app) end + -- Prepare output + local prevDate = "0000-00-00" + local version, prevVersion = "v_._._", false + local dateEntry = false + local entries = {} + for k, v in ipairs(app.commits) do + local date = assert(v.hdr:match("\nDate: +([0-9-]+) ")) + local author = assert(v.hdr:match("\nAuthor: +([^\n]+)\n")) + local prNr, short = v.msg:match("Pull request #(%d+): ([^\n]+)\n") + prevVersion = version + _, version = v.hdr:match("^([^\n]+)\n"):match("tag: ([a-z]+)-([^,]+)[,)]") + if not version then version = prevVersion end + + if version ~= prevVersion or not dateEntry then + if dateEntry then table.insert(entries, dateEntry) end + dateEntry = { + txt = date .." - ".. version .."\n\nResolved issues:\n\n" + } + prevDate = date + end + if prNr then + dateEntry.txt = dateEntry.txt .. short .." (PR ".. prNr ..")\n" + else + dateEntry.txt = dateEntry.txt .. v.msg .."\n" + end + end + if dateEntry then table.insert(entries, dateEntry) end + -- output + for k, v in ipairs(entries) do + snk:write("\n\n") + snk:write(v.txt) + snk:write("\n") + end +end + + +function main() + local app = { + since = false, + remoteName = false, + fullHistory = {}, + fullHistoryRdBeg = 1, + commits = {}, + parseFn = false, + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +main() + -- cgit v1.1 From 652d2d6b3e0fdf916a0c0107a86013821e46a730 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 10 Jan 2024 17:53:59 +0100 Subject: Add links. Fix cli bug in GitflowChangelogGen.lua. 
Add LogStatistics.lua --- doc/note/links/links.txt | 3 +- src/main/lua/git/GitflowChangelogGen.lua | 4 +- src/main/lua/paisa-jvm-memLeak/LogStatistics.lua | 112 +++++++++++++++++++++++ 3 files changed, 116 insertions(+), 3 deletions(-) create mode 100644 src/main/lua/paisa-jvm-memLeak/LogStatistics.lua diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 1d56d08..60deea4 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -221,7 +221,8 @@ Links (Aka argument amplifiers) - "https://github.com/swisspush/gateleen/pull/426#discussion_r813752075" - "https://github.com/swisspush/gateleen/blob/v1.1.61/gateleen-kafka/src/main/java/org/swisspush/gateleen/kafka/KafkaMessageSender.java#L21" - [How to use java fancy streams](https://m.youtube.com/watch?v=x5akmCWgGY0) -- think please ... "https://m.youtube.com/watch?v=hSfylUXhpkA" +- [think please ...](https://m.youtube.com/watch?v=hSfylUXhpkA) +- [java streams are ugly](https://gitit.post.ch/projects/ISA/repos/nsync/pull-requests/55/overview?commentId=328210) ## The Only way to Format Dates ISO 8601 - "https://xkcd.com/1179/" diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua index e4ec6c6..505fd3b 100644 --- a/src/main/lua/git/GitflowChangelogGen.lua +++ b/src/main/lua/git/GitflowChangelogGen.lua @@ -28,11 +28,11 @@ function parseArgs( app ) break elseif arg == "--since" then iA = iA + 1; arg = _ENV.arg[iA] - if not arg then log:write("EINVAL: --since needs value\n")end + if not arg then log:write("EINVAL: --since needs value\n")return end app.since = arg elseif arg == "--remote" then iA = iA + 1; arg = _ENV.arg[iA] - if not arg then log:write("EINVAL: --remote needs value\n")end + if not arg then log:write("EINVAL: --remote needs value\n")return end app.remoteName = arg elseif arg == "--help" then app.isHelp = true; return 0 diff --git a/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua new file mode 100644 index 0000000..cbd84b2 --- /dev/null +++ b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua @@ -0,0 +1,112 @@ + +local newLogParser = require("PaisaLogParser").newLogParser + +local inn, out, log = io.stdin, io.stdout, io.stderr + +local main, printHelp, parseArgs, run, onLogEntry, printStats + + +function printHelp( app ) + io.stdout:write(" \n" + .." TODO write help page\n" + .." \n") +end + + +function parseArgs( app ) + local arg = _ENV.arg[1] + if arg == "--help" then app.isHelp = true return 0 end + if arg ~= "--yolo" then log:write("EINVAL\n")return end + return 0 +end + + +function onLogEntry( entry, app ) + local isTheEntryWeReSearching = false + -- HOT! + --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: null")) + -- HOT! + or (entry.file == "HttpHeaderUtil" and entry.msg:find("Keep.Alive. values do not match timeout.42 .. timeout.120 for request ")) + -- HOT! 
+ --or (entry.msg:find("timetable")) + -- nope + --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: You must set the Content%-Length header")) + -- nope + --or (entry.file == "LocalHttpServerResponse" and entry.msg:find("non-proper HttpServerResponse occured", 0, true)) + -- TODO + local instantKey = entry.date + local instant = app.instants[instantKey] + if not instant then + instant = { + date = entry.date, + count = 0, + } + app.instants[instantKey] = instant + end + if isTheEntryWeReSearching then + instant.count = instant.count + 1 + end +end + + +function printStats( app ) + -- Arrange data + local numGroups = 0 + local groupSet = {} + local countMax = 1 + for date, instant in pairs(app.instants) do + assert(date == instant.date) + local key = date:sub(1, 15) + local group = groupSet[key] + if not group then + numGroups = numGroups + 1 + group = { key = key, date = date, count = 0, } + groupSet[key] = group + end + group.count = group.count + instant.count + if countMax < group.count then countMax = group.count end + end + local groupArr = {} + for _, group in pairs(groupSet) do + table.insert(groupArr, group) + end + table.sort(groupArr, function( a, b )return a.key < b.key end) + -- Plot + out:write("\n") + out:write(string.format(" Split into %9d groups\n", numGroups)) + out:write(string.format(" Peak value %9d num log entries\n", countMax)) + out:write("\n") + local fullBar = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + for _, group in pairs(groupArr) do + out:write(string.format("%s... |", group.key)) + local len = math.floor(group.count / countMax * fullBar:len()) + out:write(fullBar:sub(1, len)) + out:write("\n") + end +end + + +function run( app ) + app.logParser = newLogParser{ + cls = app, + patternV1 = "DATE STAGE SERVICE LEVEL FILE - MSG", + onLogEntry = onLogEntry, + } + app.logParser:tryParseLogs() + printStats(app) +end + + +function main() + local app = { + isHelp = false, + logParser = false, + instants = {}, + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +main() -- cgit v1.1 From 48836f2ee023c44e70eed99a63bbec4caf2c6d6e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 12 Jan 2024 17:37:22 +0100 Subject: Add link. Cleanup MvnCentralDepScan.lua --- doc/note/links/links.txt | 1 + src/main/lua/git/GitflowChangelogGen.lua | 4 +++- src/main/lua/maven/MvnCentralDepScan.lua | 30 ++++++++---------------------- 3 files changed, 12 insertions(+), 23 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 60deea4..627d0ac 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -260,6 +260,7 @@ Links (Aka argument amplifiers) - [http cache disable](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/1/overview?commentId=287832) - [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) - [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551) +- [Just one message per minute](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/375/overview?commentId=330543) ## Errorhandling is not needed ... 
- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526) diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua index 505fd3b..519d12b 100644 --- a/src/main/lua/git/GitflowChangelogGen.lua +++ b/src/main/lua/git/GitflowChangelogGen.lua @@ -5,7 +5,7 @@ local main function printHelp() io.stdout:write(" \n" - .." Helper to extract essential data from a gitflog log which potentially\n" + .." Helper to extract essential data from a gitflow log which potentially\n" .." is useful to write a CHANGELOG from.\n" .." \n" .." Options:\n" @@ -36,6 +36,8 @@ function parseArgs( app ) app.remoteName = arg elseif arg == "--help" then app.isHelp = true; return 0 + else + log:write("EINVAL: ".. arg .."\n")return end end if not app.since then log:write("EINVAL: --since missing\n")return end diff --git a/src/main/lua/maven/MvnCentralDepScan.lua b/src/main/lua/maven/MvnCentralDepScan.lua index 5322bc0..7f71afa 100644 --- a/src/main/lua/maven/MvnCentralDepScan.lua +++ b/src/main/lua/maven/MvnCentralDepScan.lua @@ -941,9 +941,6 @@ function mod.exportParentsLatest(app) local stmt = app.stmtCache[stmtStr] if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end local rs = stmt:execute() - out:write("h;Title;Parent relations (latest only)\n") - out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n") - out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n") -- Need to filter out the older artifacts. local all = {} while rs:next() do @@ -954,18 +951,14 @@ function mod.exportParentsLatest(app) if diff > 0 then -- existing is newer. Keep it and ignore newer one. goto nextRecord else -- Either no entry yet or found a newer one. - local entry = { gid=false, aid=false, ver=false, pgid=false, paid=false, pver=false } - entry.gid = gid - entry.aid = aid - entry.ver = ver - entry.pgid = rs:value(4) - entry.paid = rs:value(5) - entry.pver = rs:value(6) - all[key] = entry + all[key] = { gid=gid, aid=aid, ver=ver, pgid=rs:value(4), paid=rs:value(5), pver=rs:value(6) } end ::nextRecord:: end -- Print + out:write("h;Title;Parent relations (latest only)\n") + out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n") + out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n") for _, entry in pairs(all) do out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver ..";".. entry.pgid ..";".. entry.paid ..";".. entry.pver .."\n") @@ -1031,9 +1024,6 @@ function mod.exportDepsLatest(app) local stmt = app.stmtCache[stmtStr] if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end local rs = stmt:execute() - out:write("h;Title;Dependencies (of latest only)\n") - out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n") - out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n") -- Need to filter out the older artifacts. local all = {} local entry, key, gid, aid, ver, diff @@ -1046,18 +1036,14 @@ function mod.exportDepsLatest(app) if diff > 0 then -- existing is newer. Keep it and ignore newer one. goto nextRecord else -- Either no entry yet or found a newer one. 
- local entry = { gid=false, aid=false, ver=false, dgid=false, daid=false, dver=false } - entry.gid = gid - entry.aid = aid - entry.ver = ver - entry.dgid = rs:value(4) - entry.daid = rs:value(5) - entry.dver = rs:value(6) - all[key] = entry + all[key] = { gid=gid, aid=aid, ver=ver, dgid=rs:value(4), daid=rs:value(5), dver=rs:value(6) } end goto nextRecord ::endFiltering:: -- Print + out:write("h;Title;Dependencies (of latest only)\n") + out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n") + out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n") for _, entry in pairs(all) do out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver ..";".. entry.dgid ..";".. entry.daid ..";".. entry.dver .."\n") -- cgit v1.1 From a43e8cec95e4158426331e01287387f41c65f440 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 12 Jan 2024 17:41:53 +0100 Subject: Disable some DigHoustonLogs rules. --- src/main/lua/paisa-logs/DigHoustonLogs.lua | 281 +++++++++++++++-------------- 1 file changed, 143 insertions(+), 138 deletions(-) diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index 85c21c3..ebe7afe 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -43,29 +43,29 @@ function loadFilters( that ) { action = "drop", level = "INFO" }, { action = "drop", level = "WARN" }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "ContextImpl", level = "ERROR", - msgEquals = "Unhandled exception\njava.lang.NullPointerException: No null handler accepted", - stackPattern = "^" - .."\tat java.util.Objects.requireNonNull.Objects.java:246. ~..:..\n" - .."\tat io.vertx.core.impl.future.FutureImpl.onComplete.FutureImpl.java:132. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete.PromiseImpl.java:23. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - .."\tat io.vertx.core.file.impl.FileSystemImpl.delete.FileSystemImpl.java:290. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.deleteFile.FilePutter.java:218. ~.rest.storage.[0-9.]+.jar:..\n" - .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.lambda.cleanupFile.0.FilePutter.java:192. ~.rest.storage.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.impl.future.FutureImpl.3.onSuccess.FutureImpl.java:141. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - }, - - -- Seen: 2023-10-18 prod - -- TODO open PR to add some logging so we have a chance to find submarine. - { action = "drop", file = "ContextImpl", level = "ERROR", - msgEquals = "Unhandled exception\njava.lang.IllegalStateException: Response head already sent", - stackPattern = "^" - .."\tat io.vertx.core.http.impl.Http1xServerResponse.checkHeadWritten.Http1xServerResponse.java:684. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - .."\tat io.vertx.core.http.impl.Http1xServerResponse.setStatusCode.Http1xServerResponse.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda.getAsyncHttpClientResponseHandler.7.Forwarder.java:430. ~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.impl.future.FutureImpl.3.onFailure.FutureImpl.java:153. 
~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", - }, +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "ContextImpl", level = "ERROR", +-- msgEquals = "Unhandled exception\njava.lang.NullPointerException: No null handler accepted", +-- stackPattern = "^" +-- .."\tat java.util.Objects.requireNonNull.Objects.java:246. ~..:..\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl.onComplete.FutureImpl.java:132. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete.PromiseImpl.java:23. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- .."\tat io.vertx.core.file.impl.FileSystemImpl.delete.FileSystemImpl.java:290. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.deleteFile.FilePutter.java:218. ~.rest.storage.[0-9.]+.jar:..\n" +-- .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.lambda.cleanupFile.0.FilePutter.java:192. ~.rest.storage.[0-9.]+.jar:..\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl.3.onSuccess.FutureImpl.java:141. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- }, +-- +-- -- Seen: 2023-10-18 prod +-- -- TODO open PR to add some logging so we have a chance to find submarine. +-- { action = "drop", file = "ContextImpl", level = "ERROR", +-- msgEquals = "Unhandled exception\njava.lang.IllegalStateException: Response head already sent", +-- stackPattern = "^" +-- .."\tat io.vertx.core.http.impl.Http1xServerResponse.checkHeadWritten.Http1xServerResponse.java:684. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- .."\tat io.vertx.core.http.impl.Http1xServerResponse.setStatusCode.Http1xServerResponse.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda.getAsyncHttpClientResponseHandler.7.Forwarder.java:430. ~.gateleen.routing.[0-9.]+.jar:..\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl.3.onFailure.FutureImpl.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", +-- }, -- Reported: SDCISA-13717 -- Seen: 2024-01-05 prod, 2023-10-18 prod @@ -82,10 +82,10 @@ function loadFilters( that ) .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending" .." any data if you are not using HTTP chunked encoding.", }, - -- Seen: 2023-10-18 - -- Opened nsync PR 49 as a first counter measure. - { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.NullPointerException: null", - stackStartsWith = "\tat org.swisspush.nsync.multiget.MultiGetServer.lambda$tryLaunchOneRequest$2(MultiGetServer.java:107) ~[nsync-0.6.0.jar:?]" }, +-- -- Seen: 2023-10-18 +-- -- Opened nsync PR 49 as a first counter measure. +-- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.NullPointerException: null", +-- stackStartsWith = "\tat org.swisspush.nsync.multiget.MultiGetServer.lambda$tryLaunchOneRequest$2(MultiGetServer.java:107) ~[nsync-0.6.0.jar:?]" }, -- Bunch of nonsense !ERROR!s which happen all the time as eddies go offline. @@ -102,61 +102,61 @@ function loadFilters( that ) msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles" .." 
The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/" .."trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012", }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" - .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded" - .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" - .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, - -- Seen: 2023-10-18 prod - -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. - { action = "drop", file = "FilePutter", level = "ERROR", - msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, - -- Seen: 2023-10-18 prod +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" +-- .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded" +-- .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", }, +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" +-- .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, +-- -- Seen: 2023-10-18 prod +-- -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. +-- { action = "drop", file = "FilePutter", level = "ERROR", +-- msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, + -- Seen: 2024-01-10 prod, 2023-10-18 prod -- There are a whole bunch of related errors behind this filter which AFAICT all relate to shaky eddie connections. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+" .." 
The timeout period of 30000ms has been exceeded while executing [DEGLOPSTU]+ /from.houston/%d+/eagle/[^ ]+ for server eddie%d+:7012$", }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "Forwarder", level = "ERROR", +-- msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, +-- +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, +-- +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, - - -- Seen: 2023-10-18 prod + -- Seen: 2024-01-05 prod, 2023-10-18 prod -- Reported: TODO link existing issue here { action = "drop", file = "HttpHeaderUtil", level = "ERROR", msgPattern = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/.*", }, - -- Seen: 2023-10-18 prod - -- Reported: - { action = "drop", file = "Utils", level = "ERROR", - msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Sync failed.\n" - .."{\n" - ..' "countIndexQueries" : 1,\n' - ..' "countSentBytes" : 119,\n' - ..' "countReceivedBytes" : 0,\n' - ..' "countMultiGetRequests" : 0,\n' - ..' "countPuts" : 0,\n' - ..' "countDeletes" : 0,\n' - ..' "durationSeconds" : 0.0,\n' - ..' "iterationDepth" : 0\n' - .."}", }, - - -- Seen: 2023-10-18 prod - -- Reported: - { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", - stackPattern = "^" - .."\tat org.swisspush.gateleen.core.http.LocalHttpClientRequest.connection.LocalHttpClientRequest.java:754. ~.gateleen.core.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.1.lambda.handle.0.Forwarder.java:362. ~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", - }, +-- -- Seen: 2023-10-18 prod +-- -- Reported: +-- { action = "drop", file = "Utils", level = "ERROR", +-- msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Sync failed.\n" +-- .."{\n" +-- ..' "countIndexQueries" : 1,\n' +-- ..' "countSentBytes" : 119,\n' +-- ..' "countReceivedBytes" : 0,\n' +-- ..' "countMultiGetRequests" : 0,\n' +-- ..' "countPuts" : 0,\n' +-- ..' "countDeletes" : 0,\n' +-- ..' "durationSeconds" : 0.0,\n' +-- ..' "iterationDepth" : 0\n' +-- .."}", }, +-- +-- -- Seen: 2023-10-18 prod +-- -- Reported: +-- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", +-- stackPattern = "^" +-- .."\tat org.swisspush.gateleen.core.http.LocalHttpClientRequest.connection.LocalHttpClientRequest.java:754. ~.gateleen.core.[0-9.]+.jar:..\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder.1.lambda.handle.0.Forwarder.java:362. 
~.gateleen.routing.[0-9.]+.jar:..\n" +-- .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", +-- }, - -- Seen: 2023-10-18 prod + -- Seen: 2024-01-05 prod, 2023-10-18 prod -- Reported: { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address:" @@ -168,64 +168,65 @@ function loadFilters( that ) { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\n" .."io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address: __vertx.reply.[0-9]+, repliedAddress: nsync.register.sync" }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" - .." The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" - .." for server bob:8080", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", - stackStartsWith = "" - .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" - .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" - .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" - .."\tat io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", - }, - - -- Seen: 2023-10-18 prod - -- TODO Push issue to my backlog to fix this. 
- { action = "drop", file = "ContextImpl", level = "ERROR", - msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", - stackPattern = "^" - .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. ~.gateleen.logging.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" - .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. ~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", - }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^..... ................................ http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", - }, - - -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" - .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", }, - -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" - .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", }, +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", +-- msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, +-- +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "Forwarder", level = "ERROR", +-- msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, +-- +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "Forwarder", level = "ERROR", +-- msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" +-- .." The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" +-- .." 
for server bob:8080", }, +-- +-- -- Seen: 2023-10-18 prod +-- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", +-- stackStartsWith = "" +-- .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" +-- .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" +-- .."\tat io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", +-- }, +-- +-- -- Seen: 2023-10-18 prod +-- -- TODO Push issue to my backlog to fix this. +-- { action = "drop", file = "ContextImpl", level = "ERROR", +-- msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", +-- stackPattern = "^" +-- .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" +-- .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" +-- .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. ~.gateleen.logging.[0-9.]+.jar:..\n" +-- .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" +-- .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" +-- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. ~.gateleen.routing.[0-9.]+.jar:..\n" +-- .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", +-- }, +-- +-- { action = "drop", file = "Forwarder", level = "ERROR", +-- msgPattern = "^..... ................................ 
http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", +-- }, +-- +-- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. +-- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" +-- .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", }, +-- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. +-- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" +-- .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", }, + -- Seen 2024-01-10 prod -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/query.index The timeout period of 30000ms has been exceeded while executing" @@ -233,8 +234,8 @@ function loadFilters( that ) -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" .." http://eddie%d+:7012/from.houston/%d+/eagle/timetable/notification/v1/planningareas/%d+/notifications/%x+ Connection was closed$", }, - -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, +-- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. +-- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, ---- TODO Thought timeout? Can happen. But how often is ok? ---- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) @@ -306,11 +307,15 @@ function loadFilters( that ) --{ action = "drop", file = "RedisQues", level = "WARN", -- msgPattern = "Registration for queue .+ has changed to null", }, - ---- Reported: SDCISA-10973 - ---- Seen: 2023-10-18 prod. - --{ action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - -- msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" - -- .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, +-- -- Reported: SDCISA-10973 +-- -- Seen: 2023-10-18 prod. 
+-- { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", +-- msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" +-- .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, + + -- Seen 2024-01-10 prod + { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", + msgPattern = "The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012" }, --{ action = "drop", file = "Forwarder", level = "ERROR", -- msgPattern = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" -- cgit v1.1 From 9be648594d4861541a7fa4c3c72cfe16650761e3 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 16 Jan 2024 17:08:20 +0100 Subject: Begun to write native mvn-launch.c due to annoying policies. --- doc/note/links/links.txt | 2 + src/main/c/postshit/launch/mvn/launchr.c | 5 ++ src/main/c/postshit/launch/mvn/mvn-launch.c | 130 ++++++++++++++++++++++++++++ src/main/c/postshit/launch/mvn/windoof.h | 79 +++++++++++++++++ 4 files changed, 216 insertions(+) create mode 100644 src/main/c/postshit/launch/mvn/launchr.c create mode 100644 src/main/c/postshit/launch/mvn/mvn-launch.c create mode 100644 src/main/c/postshit/launch/mvn/windoof.h diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 627d0ac..4bb0c5c 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -278,6 +278,7 @@ Links (Aka argument amplifiers) ## How to API design - "https://m.youtube.com/watch?v=2xgplCQS1bY" - [How to migrate an API properly via migration path](https://wikit.post.ch/x/pK1WJQ) +- [What "Software Architect" means](https://m.youtube.com/watch?v=rPJfadFSCyQ&t=900) ## Posix c API design - "https://lucumr.pocoo.org/2013/8/18/beautiful-native-libraries/" @@ -443,6 +444,7 @@ Links (Aka argument amplifiers) - "https://medium.com/dinahmoe/escape-dependency-hell-b289de537403" - "https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/119/overview?commentId=46245" - [Stop Ductaping crap together](https://devrant.com/rants/5107044) +- [JavaMelody OutOfMemory](https://wikit.post.ch/display/ISA/God+UI+ohne+Inhalt?focusedCommentId=1439580947#comment-1439580947) ## Input validation - [WontDo](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/512/overview?commentId=222672) diff --git a/src/main/c/postshit/launch/mvn/launchr.c b/src/main/c/postshit/launch/mvn/launchr.c new file mode 100644 index 0000000..0c7db07 --- /dev/null +++ b/src/main/c/postshit/launch/mvn/launchr.c @@ -0,0 +1,5 @@ +/* + + TODO move shared launcher stuff here. + +*/ diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c new file mode 100644 index 0000000..0766c14 --- /dev/null +++ b/src/main/c/postshit/launch/mvn/mvn-launch.c @@ -0,0 +1,130 @@ +/* + + Shitty policies require shitty workarounds. Standard maven ships with a 'cmd' + file for its execution. But as some shiny 'security' policies forbid + execution of 'cmd' files, we need to waste our time writing stuff like this + instead doing our work. Grrr... + + ${CC:?} -Wall -Werror -fmax-errors=3 -o build/bin/mvn-launch.exe src/main/c/postshit/launch/mvn/mvn-launch.c -Isrc/main/c/postshit/launch/mvn + +*/ + +#include + +#include +#include + +#define LOGDBG(...) 
fprintf(stderr, __VA_ARGS__) + + +static int appendArg( char*cmdline, int*cmdline_off, int cmdline_cap, const char*newArg, int newArg_len ){ + #define cmdline_off (*cmdline_off) + int err; + if( cmdline_cap < cmdline_off + newArg_len + sizeof" \"\"" ){ + fprintf(stderr, "ENOBUFS: %s cmdline too long\n", strrchr(__FILE__,'/')+1); + err = -ENOBUFS; goto endFn; + } + cmdline[cmdline_off++] = ' '; + cmdline[cmdline_off++] = '"'; + for(; newArg[0] != '\0' ; ++newArg ){ + if( newArg[0] == '"' ){ + fprintf(stderr, "ENOTSUP: %s not impl to handle quotes inside args (TODO_H0cCAJtBAg)\n", + strrchr(__FILE__,'/')); + err = -ENOTSUP; goto endFn; + } + cmdline[cmdline_off++] = newArg[0]; + } + cmdline[cmdline_off++] = '"'; + err = 0; +endFn: + return err; + #undef cmdline_off +} + + +static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_off, int cmdline_cap, const char*envKey ){ + #define cmdline_off (*cmdline_off) + assert(envKey != NULL); + int err; + char envval[0x7FFF]; + const int envval_cap = sizeof envval; + err = GetEnvironmentVariable(envKey, envval, envval_cap-1); + if( err >= envval_cap-1 ){ + LOGDBG("ENOBUFS: %s: environ.%s too long\n", strrchr(__FILE__,'/'), envKey); + err = -ENOBUFS; goto endFn; + } + if( cmdline_cap < cmdline_off + err ){ + LOGDBG("ENOBUFS: %s: Argument list too long\n", strrchr(__FILE__,'/')); + err = -ENOBUFS; goto endFn; + } + if( err > 0 ){ + appendArg(cmdline, cmdline_off, cmdline_cap, envval, err); + cmdline_off += err; + } + err = 0; +endFn: + return err; + #undef cmdline_off +} + + +int main( int argc, char**argv ){ + int err; + char envval[0x7FFF]; + const int envval_cap = sizeof envval; + + /*[see](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/ + char cmdline[32767] = "" + + //"%JAVA_HOME%/bin/java.exe" + "C:/work/tmp/arg-printer.exe" + + //" %MAVEN_OPTS%" /*inherit from environ*/ + //" %MAVEN_DEBUG_OPTS%" /*inherit from environ*/ + " -classpath %CLASSWORLDS_JAR%" + " -Dclassworlds.conf=C:/Users/fankhauseand/.opt/maven/bin/m2.conf" + " -Dmaven.home=C:/Users/fankhauseand/.opt/maven" /*MUST NOT end with slash*/ + " -Dmaven.multiModuleProjectDirectory=%WDIR%" /*TODO dir of where the pom resides (LIKELY cwd)*/ + " org.codehaus.plexus.classworlds.launcher.Launcher" + /*TODO append argv1..argvN here*/ + "\0"; + const int cmdline_cap = sizeof cmdline; + int cmdline_off = 0; + for(; cmdline[cmdline_off] != '\0' ; ++cmdline_off ); + + err = 0 + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "JVM_CONFIG_MAVEN_PROPS") + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "MAVEN_OPTS") + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "MAVEN_DEBUG_OPTS") + || appendArg(cmdline, &cmdline_off, cmdline_cap, "-classpath", 10) + ; + if( err ){ LOGDBG("[TRACE] %s:%d\n", __FILE__, __LINE__); goto endFn; } + + /*append all other args*/ + for( int iA=1 ; iA < argc ; ++iA ){ + char *arg = argv[iA]; + int len = strlen(arg); + appendArg(cmdline, it-cmdline, cmdline_cap, envval, len); + it += len; + } + + fprintf(stderr, "[DEBUG] cmdline is:\n%.*s\n", (int)(it-cmdline), cmdline); + + STARTUPINFOA lpsui = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, }; + lpsui.cb = sizeof(lpsui); + PROCESS_INFORMATION proc; + fprintf(stderr, "%s: [WARN ] TODO_qgsCALx5AgC2EgIAEggCADEsAgCeawIA\n", strrchr(__FILE__,'/')+1); + /*TODO try CREATE_NO_WINDOW|BELOW_NORMAL_PRIORITY_CLASS */ + err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, + NULL, NULL, &lpsui, &proc); + if( err == 0 ){ + fprintf(stderr, "%s, 
CreateProcess(): 0x%0lX\n", strrchr(__FILE__,'/')+1, GetLastError()); + err = -1; goto endFn; + } + err = 0; +endFn: + if( err < 0 ) err = -err; + if( err > 0x7F ) err = 1; + return err; +} + diff --git a/src/main/c/postshit/launch/mvn/windoof.h b/src/main/c/postshit/launch/mvn/windoof.h new file mode 100644 index 0000000..b5b93fa --- /dev/null +++ b/src/main/c/postshit/launch/mvn/windoof.h @@ -0,0 +1,79 @@ +#ifndef INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA +#define INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA + + +#define assert(expr) do{if(!(expr)){fprintf(stderr,"assert(%s) %s:%d\n", #expr, __FILE__, __LINE__);}}while(0) + +#define NULL ((void*)0) + +#define INT_MAX ((int)0x7FFFFFFF) + +#define ENOBUFS 119 +#define ENOTSUP 129 + +#define CREATE_NO_WINDOW 0x08000000 + +#define BELOW_NORMAL_PRIORITY_CLASS 0x00004000 + + +typedef struct { + int cb; + char* lpReserved; + char* lpDesktop; + char* lpTitle; + int dwX; + int dwY; + int dwXSize; + int dwYSize; + int dwXCountChars; + int dwYCountChars; + int dwFillAttribute; + int dwFlags; + short wShowWindow; + short cbReserved2; + void* lpReserved2; + void* hStdInput; + void* hStdOutput; + void* hStdError; +} STARTUPINFOA; + + +typedef struct { + void* hProcess; + void* hThread; + int dwProcessId; + int dwThreadId; +} PROCESS_INFORMATION; + + +typedef struct { + int nLength; + void* lpSecurityDescriptor; + int bInheritHandle; +} SECURITY_ATTRIBUTES; + + +long unsigned GetLastError(void); + +int fprintf(struct _iobuf*, const char *, ...); + +long long unsigned strlen(const char*); + +char *strrchr(const char *s, int c); + +int CreateProcessA( + const char* lpApplicationName, + char* lpCommandLine, + SECURITY_ATTRIBUTES* lpProcessAttributes, + SECURITY_ATTRIBUTES* lpThreadAttributes, + int bInheritHandles, + int dwCreationFlags, + void* lpEnvironment, + const char* lpCurrentDirectory, + STARTUPINFOA* lpStartupInfo, + PROCESS_INFORMATION* lpProcessInformation +); + + + +#endif /* INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA */ -- cgit v1.1 From b94f8a85487db2fbdc694fb38f8c025ba39e430c Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 11:10:39 +0100 Subject: mvn-launch.c: Seems to work now --- src/main/c/postshit/launch/mvn/mvn-launch.c | 184 ++++++++++++++++++++-------- 1 file changed, 133 insertions(+), 51 deletions(-) diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c index 0766c14..7c7796d 100644 --- a/src/main/c/postshit/launch/mvn/mvn-launch.c +++ b/src/main/c/postshit/launch/mvn/mvn-launch.c @@ -5,7 +5,10 @@ execution of 'cmd' files, we need to waste our time writing stuff like this instead of doing our work. Grrr... - ${CC:?} -Wall -Werror -fmax-errors=3 -o build/bin/mvn-launch.exe src/main/c/postshit/launch/mvn/mvn-launch.c -Isrc/main/c/postshit/launch/mvn + ${CC:?} -o build/bin/mvn-launch.exe \ + -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \ + src/main/c/postshit/launch/mvn/mvn-launch.c \ + -Isrc/main/c/postshit/launch/mvn \ */ #include - #include #include @@ -14,38 +17,77 @@ +#define LOGERR(...) fprintf(stderr, __VA_ARGS__) #define LOGDBG(...) 
fprintf(stderr, __VA_ARGS__) -static int appendArg( char*cmdline, int*cmdline_off, int cmdline_cap, const char*newArg, int newArg_len ){ - #define cmdline_off (*cmdline_off) - int err; - if( cmdline_cap < cmdline_off + newArg_len + sizeof" \"\"" ){ - fprintf(stderr, "ENOBUFS: %s cmdline too long\n", strrchr(__FILE__,'/')+1); +static int appendRaw( char*dst, int*dst_off, int dst_cap, const char*src, int src_len ){ + #define dst_off (*dst_off) + register int err; + if( dst_cap < dst_off + src_len ){ + LOGDBG("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src); err = -ENOBUFS; goto endFn; } - cmdline[cmdline_off++] = ' '; - cmdline[cmdline_off++] = '"'; + memcpy(dst + dst_off, src, src_len); + dst_off += src_len; + err = 0; +endFn: + return err; + #undef dst_off +} + + +static int appendQuotEscaped( char*dst, int*dst_off, int dst_cap, const char*src, int src_len ){ + #define dst_off (*dst_off) + register int err; + if( dst_cap < dst_off + src_len ){ + LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src); + err = -ENOBUFS; goto endFn; + } + for(; src[0] != '\0' ; ++src ){ + if( src[0] == '"' ){ + LOGDBG("ENOTSUP: %s not impl to handle quotes inside args (TODO_a9o8uz4rga98orui)\n", + strrchr(__FILE__,'/')); + err = -ENOTSUP; goto endFn; + } + dst[dst_off++] = src[0]; + } + err = 0; +endFn: + return err; + #undef dst_off +} + + +static int appendArg( char*cmdline, int*cmdline_len, int cmdline_cap, const char*newArg, int newArg_len ){ + #define cmdline_len (*cmdline_len) + register int err; + if( cmdline_cap < cmdline_len + newArg_len + sizeof" \"\"" ){ + LOGDBG("ENOBUFS: %s cmdline too long\n", strrchr(__FILE__,'/')+1); + err = -ENOBUFS; goto endFn; + } + cmdline[cmdline_len++] = ' '; + cmdline[cmdline_len++] = '"'; for(; newArg[0] != '\0' ; ++newArg ){ if( newArg[0] == '"' ){ - fprintf(stderr, "ENOTSUP: %s not impl to handle quotes inside args (TODO_H0cCAJtBAg)\n", + LOGDBG("ENOTSUP: %s not impl to handle quotes inside args (TODO_H0cCAJtBAg)\n", strrchr(__FILE__,'/')); err = -ENOTSUP; goto endFn; } - cmdline[cmdline_off++] = newArg[0]; + cmdline[cmdline_len++] = newArg[0]; } - cmdline[cmdline_off++] = '"'; + cmdline[cmdline_len++] = '"'; err = 0; endFn: return err; - #undef cmdline_off + #undef cmdline_len } -static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_off, int cmdline_cap, const char*envKey ){ - #define cmdline_off (*cmdline_off) +static int appendFromEnvironEvenIfEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){ + #define cmdline_len (*cmdline_len) assert(envKey != NULL); - int err; + register int err; char envval[0x7FFF]; const int envval_cap = sizeof envval; err = GetEnvironmentVariable(envKey, envval, envval_cap-1); @@ -53,74 +95,114 @@ static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_off, int cmdli LOGDBG("ENOBUFS: %s: environ.%s too long\n", strrchr(__FILE__,'/'), envKey); err = -ENOBUFS; goto endFn; } - if( cmdline_cap < cmdline_off + err ){ - LOGDBG("ENOBUFS: %s: Argument list too long\n", strrchr(__FILE__,'/')); + err = appendArg(cmdline, &cmdline_len, cmdline_cap, envval, err); + if( err < 0 ) goto endFn; + cmdline_len += err; + err = 0; +endFn: + return err; + #undef cmdline_len + +} + + +static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){ + #define cmdline_len (*cmdline_len) + assert(envKey != NULL); + register int err; + char envval[0x7FFF]; + const int envval_cap = sizeof envval; + err = 
GetEnvironmentVariable(envKey, envval, envval_cap-1); + if( err >= envval_cap-1 ){ + LOGDBG("ENOBUFS: %s: environ.%s too long\n", strrchr(__FILE__,'/'), envKey); err = -ENOBUFS; goto endFn; } if( err > 0 ){ - appendArg(cmdline, cmdline_off, cmdline_cap, envval, err); - cmdline_off += err; + err = appendArg(cmdline, &cmdline_len, cmdline_cap, envval, err); + if( err < 0 ) goto endFn; + cmdline_len += err; } err = 0; endFn: return err; - #undef cmdline_off + #undef cmdline_len } int main( int argc, char**argv ){ - int err; + register int err; char envval[0x7FFF]; const int envval_cap = sizeof envval; - /*[see](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/ - char cmdline[32767] = "" - - //"%JAVA_HOME%/bin/java.exe" - "C:/work/tmp/arg-printer.exe" - - //" %MAVEN_OPTS%" /*inherit from environ*/ - //" %MAVEN_DEBUG_OPTS%" /*inherit from environ*/ - " -classpath %CLASSWORLDS_JAR%" - " -Dclassworlds.conf=C:/Users/fankhauseand/.opt/maven/bin/m2.conf" - " -Dmaven.home=C:/Users/fankhauseand/.opt/maven" /*MUST NOT end with slash*/ - " -Dmaven.multiModuleProjectDirectory=%WDIR%" /*TODO dir of where the pom resides (LIKELY cwd)*/ - " org.codehaus.plexus.classworlds.launcher.Launcher" - /*TODO append argv1..argvN here*/ - "\0"; + char username[16]; + const int username_cap = sizeof username; + err = GetEnvironmentVariable("USERNAME", username, username_cap); + if( err == 0 ){ LOGERR("ERROR: GetEnvironmentVariable(USERNAME) -> 0x%lX\n", GetLastError()); err = -1; goto endFn; } + if( err > username_cap ){ LOGERR("ENOBUFS: environ.USERNAME too long\n"); err = -1; goto endFn; } + assert(err > 0); + const int username_len = err; + + char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/ + cmdline[0] = '\0'; const int cmdline_cap = sizeof cmdline; - int cmdline_off = 0; - for(; cmdline[cmdline_off] != '\0' ; ++cmdline_off ); + int cmdline_len = 0; err = 0 - || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "JVM_CONFIG_MAVEN_PROPS") - || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "MAVEN_OPTS") - || appendFromEnvironIfNotEmpty(cmdline, &cmdline_off, cmdline_cap, "MAVEN_DEBUG_OPTS") - || appendArg(cmdline, &cmdline_off, cmdline_cap, "-classpath", 10) + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "C:/Users/", 9) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/java/bin/java.exe", 23) < 0 + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "JVM_CONFIG_MAVEN_PROPS") < 0 + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_OPTS") < 0 + || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_DEBUG_OPTS") < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -classpath", 11) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/fankhauseand/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 67) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/fankhauseand/.opt/maven/bin/m2.conf", 64) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dmaven.home=C:/Users/fankhauseand/.opt/maven", 46) < 0 ; if( err ){ LOGDBG("[TRACE] %s:%d\n", __FILE__, __LINE__); goto endFn; } + char tmpBuf[0x7FFF]; + const int tmpBuf_cap = sizeof tmpBuf; + err = GetCurrentDirectory(tmpBuf_cap, tmpBuf); + if( err == 0 ){ + LOGDBG("%s: GetCurrentDirectory() -> 0x%lX\n", strrchr(__FILE__,'/')+1, GetLastError()); + err = -1; goto endFn; } + 
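/* Win32 note: GetCurrentDirectory() returns 0 on failure; a value >= tmpBuf_cap means the buffer was too small and the value is the required size including the terminating NUL; otherwise it is the number of chars written, hence this pair of checks. */ +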
if( err >= tmpBuf_cap ){ + LOGDBG("ENOBUFS: %s: working dir too long\n", strrchr(__FILE__,'/')+1); + err = -ENOBUFS; goto endFn; } + assert(err > 0); + const int tmpBuf_len = err; + + err = 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " \"-Dmaven.multiModuleProjectDirectory=", 38) < 0 + || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, tmpBuf, tmpBuf_len) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " org.codehaus.plexus.classworlds.launcher.Launcher", 50) < 0 + ; + if( err ){ LOGDBG("[TRACE] %s:%d", __FILE__, __LINE__); err = -1; goto endFn; } + /*append all other args*/ for( int iA=1 ; iA < argc ; ++iA ){ char *arg = argv[iA]; - int len = strlen(arg); - appendArg(cmdline, it-cmdline, cmdline_cap, envval, len); - it += len; + err = appendArg(cmdline, &cmdline_len, cmdline_cap, arg, strlen(arg)); + if( err < 0 ){ LOGDBG("[TRACE] %s:%d\n", __FILE__, __LINE__); goto endFn; } } - fprintf(stderr, "[DEBUG] cmdline is:\n%.*s\n", (int)(it-cmdline), cmdline); + //LOGDBG("[DEBUG] cmdline is:\n%.*s\n", cmdline_len, cmdline); STARTUPINFOA lpsui = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, }; lpsui.cb = sizeof(lpsui); PROCESS_INFORMATION proc; - fprintf(stderr, "%s: [WARN ] TODO_qgsCALx5AgC2EgIAEggCADEsAgCeawIA\n", strrchr(__FILE__,'/')+1); - /*TODO try CREATE_NO_WINDOW|BELOW_NORMAL_PRIORITY_CLASS */ - err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, - NULL, NULL, &lpsui, &proc); + /*TODO try BELOW_NORMAL_PRIORITY_CLASS */ + err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &lpsui, &proc); if( err == 0 ){ - fprintf(stderr, "%s, CreateProcess(): 0x%0lX\n", strrchr(__FILE__,'/')+1, GetLastError()); + LOGDBG("[DEBUG] CMDLINE: %.*s\n", cmdline_len, cmdline); + LOGERR("%s, CreateProcess(): 0x%0lX\n", strrchr(__FILE__,'/')+1, GetLastError()); err = -1; goto endFn; } + err = WaitForSingleObject(proc.hProcess, INFINITE); + if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: %s: WaitForSingleObject() -> %d %s:%d\n", strrchr(__FILE__,'/')+1, err, __FILE__, __LINE__); + err = -1; goto endFn; } err = 0; endFn: if( err < 0 ) err = -err; -- cgit v1.1 From a54b05e20f4cdd9515a4b34108fc39a8ca090767 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 11:39:23 +0100 Subject: mvn-launchr.c: Cleanup --- src/main/c/postshit/launch/mvn/launchr.c | 5 -- src/main/c/postshit/launch/mvn/mvn-launch.c | 113 +++++++++++----------------- src/main/c/postshit/launch/mvn/windoof.h | 79 ------------------- 3 files changed, 46 insertions(+), 151 deletions(-) delete mode 100644 src/main/c/postshit/launch/mvn/launchr.c delete mode 100644 src/main/c/postshit/launch/mvn/windoof.h diff --git a/src/main/c/postshit/launch/mvn/launchr.c b/src/main/c/postshit/launch/mvn/launchr.c deleted file mode 100644 index 0c7db07..0000000 --- a/src/main/c/postshit/launch/mvn/launchr.c +++ /dev/null @@ -1,5 +0,0 @@ -/* - - TODO move shared launcher stuff here. - -*/ diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c index 7c7796d..5187b10 100644 --- a/src/main/c/postshit/launch/mvn/mvn-launch.c +++ b/src/main/c/postshit/launch/mvn/mvn-launch.c @@ -13,7 +13,6 @@ */ #include - #include #include @@ -21,41 +20,40 @@ #define LOGDBG(...) 
fprintf(stderr, __VA_ARGS__) -static int appendRaw( char*dst, int*dst_off, int dst_cap, const char*src, int src_len ){ - #define dst_off (*dst_off) +static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){ + #define dst_len (*dst_len) register int err; - if( dst_cap < dst_off + src_len ){ - LOGDBG("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src); + if( dst_cap < dst_len + src_len ){ + LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src); err = -ENOBUFS; goto endFn; } - memcpy(dst + dst_off, src, src_len); - dst_off += src_len; + memcpy(dst + dst_len, src, src_len); + dst_len += src_len; err = 0; endFn: return err; - #undef dst_off + #undef dst_len } -static int appendQuotEscaped( char*dst, int*dst_off, int dst_cap, const char*src, int src_len ){ - #define dst_off (*dst_off) +static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){ + #define dst_len (*dst_len) register int err; - if( dst_cap < dst_off + src_len ){ + if( dst_cap < dst_len + src_len ){ LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src); err = -ENOBUFS; goto endFn; } - for(; src[0] != '\0' ; ++src ){ - if( src[0] == '"' ){ - LOGDBG("ENOTSUP: %s not impl to handle quotes inside args (TODO_a9o8uz4rga98orui)\n", - strrchr(__FILE__,'/')); + for( err = 0 ; err < src_len ; ++err ){ + if( src[err] == '"' ){ + LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__); err = -ENOTSUP; goto endFn; } - dst[dst_off++] = src[0]; + dst[dst_len++] = src[err]; } err = 0; endFn: return err; - #undef dst_off + #undef dst_len } @@ -63,18 +61,17 @@ static int appendArg( char*cmdline, int*cmdline_len, int cmdline_cap, const char #define cmdline_len (*cmdline_len) register int err; if( cmdline_cap < cmdline_len + newArg_len + sizeof" \"\"" ){ - LOGDBG("ENOBUFS: %s cmdline too long\n", strrchr(__FILE__,'/')+1); + LOGERR("ENOBUFS: Cmdline too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__); err = -ENOBUFS; goto endFn; } cmdline[cmdline_len++] = ' '; cmdline[cmdline_len++] = '"'; - for(; newArg[0] != '\0' ; ++newArg ){ - if( newArg[0] == '"' ){ - LOGDBG("ENOTSUP: %s not impl to handle quotes inside args (TODO_H0cCAJtBAg)\n", - strrchr(__FILE__,'/')); + for( err = 0 ; err < newArg_len ; ++err ){ + if( newArg[err] == '"' ){ + LOGERR("ENOTSUP: Quotes in args not impl. 
%s:%d\n", strrchr(__FILE__,'/')+1, __LINE__); err = -ENOTSUP; goto endFn; } - cmdline[cmdline_len++] = newArg[0]; + cmdline[cmdline_len++] = newArg[err]; } cmdline[cmdline_len++] = '"'; err = 0; @@ -84,28 +81,6 @@ endFn: } -static int appendFromEnvironEvenIfEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){ - #define cmdline_len (*cmdline_len) - assert(envKey != NULL); - register int err; - char envval[0x7FFF]; - const int envval_cap = sizeof envval; - err = GetEnvironmentVariable(envKey, envval, envval_cap-1); - if( err >= envval_cap-1 ){ - LOGDBG("ENOBUFS: %s: environ.%s too long\n", strrchr(__FILE__,'/'), envKey); - err = -ENOBUFS; goto endFn; - } - err = appendArg(cmdline, &cmdline_len, cmdline_cap, envval, err); - if( err < 0 ) goto endFn; - cmdline_len += err; - err = 0; -endFn: - return err; - #undef cmdline_len - -} - - static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){ #define cmdline_len (*cmdline_len) assert(envKey != NULL); @@ -114,12 +89,12 @@ static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_len, int cmdli const int envval_cap = sizeof envval; err = GetEnvironmentVariable(envKey, envval, envval_cap-1); if( err >= envval_cap-1 ){ - LOGDBG("ENOBUFS: %s: environ.%s too long\n", strrchr(__FILE__,'/'), envKey); + LOGERR("ENOBUFS: environ.%s too long. %s:%d\n", envKey, strrchr(__FILE__,'/')+1, __LINE__); err = -ENOBUFS; goto endFn; } if( err > 0 ){ err = appendArg(cmdline, &cmdline_len, cmdline_cap, envval, err); - if( err < 0 ) goto endFn; + if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; } cmdline_len += err; } err = 0; @@ -131,14 +106,15 @@ endFn: int main( int argc, char**argv ){ register int err; - char envval[0x7FFF]; - const int envval_cap = sizeof envval; char username[16]; const int username_cap = sizeof username; err = GetEnvironmentVariable("USERNAME", username, username_cap); - if( err == 0 ){ LOGERR("ERROR: GetEnvironmentVariable(USERNAME) -> 0x%lX\n", GetLastError()); err = -1; goto endFn; } - if( err > username_cap ){ LOGERR("ENOBUFS: environ.USERNAME too long\n"); err = -1; goto endFn; } + if( err == 0 ){ LOGERR("ERROR: GetEnvironmentVariable(USERNAME) -> 0x%lX\n", GetLastError()); + err = -1; goto endFn; } + if( err > username_cap ){ + LOGERR("ENOBUFS: environ.USERNAME too long. 
%s:%d\n", strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; } assert(err > 0); const int username_len = err; @@ -155,20 +131,26 @@ int main( int argc, char**argv ){ || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_OPTS") < 0 || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_DEBUG_OPTS") < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -classpath", 11) < 0 - || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/fankhauseand/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 67) < 0 - || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/fankhauseand/.opt/maven/bin/m2.conf", 64) < 0 - || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dmaven.home=C:/Users/fankhauseand/.opt/maven", 46) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/", 9) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 45) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/", 29) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/bin/m2.conf", 23) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dmaven.home=C:/Users/", 23) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven", 11) < 0 ; - if( err ){ LOGDBG("[TRACE] %s:%d\n", __FILE__, __LINE__); goto endFn; } + if( err ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; } char tmpBuf[0x7FFF]; const int tmpBuf_cap = sizeof tmpBuf; err = GetCurrentDirectory(tmpBuf_cap, tmpBuf); if( err == 0 ){ - LOGDBG("%s: GetCurrentDirectory() -> 0x%lX\n", strrchr(__FILE__,'/')+1, GetLastError()); + LOGERR("ERROR: GetCurrentDirectory() -> 0x%lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } if( err >= tmpBuf_cap ){ - LOGDBG("ENOBUFS: %s: working dir too long\n", strrchr(__FILE__,'/')+1); + LOGERR("ENOBUFS: Working dir too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__); err = -ENOBUFS; goto endFn; } assert(err > 0); const int tmpBuf_len = err; @@ -185,23 +167,20 @@ int main( int argc, char**argv ){ for( int iA=1 ; iA < argc ; ++iA ){ char *arg = argv[iA]; err = appendArg(cmdline, &cmdline_len, cmdline_cap, arg, strlen(arg)); - if( err < 0 ){ LOGDBG("[TRACE] %s:%d\n", __FILE__, __LINE__); goto endFn; } + if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; } } - //LOGDBG("[DEBUG] cmdline is:\n%.*s\n", cmdline_len, cmdline); - - STARTUPINFOA lpsui = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, }; - lpsui.cb = sizeof(lpsui); + STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, }; + startInfo.cb = sizeof(startInfo); PROCESS_INFORMATION proc; - /*TODO try BELOW_NORMAL_PRIORITY_CLASS */ - err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &lpsui, &proc); + err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc); if( err == 0 ){ - LOGDBG("[DEBUG] CMDLINE: %.*s\n", cmdline_len, cmdline); - LOGERR("%s, CreateProcess(): 0x%0lX\n", strrchr(__FILE__,'/')+1, GetLastError()); + LOGERR("[DEBUG] CMDLINE: %.*s\n", cmdline_len, cmdline); + LOGERR("ERROR: CreateProcess(): 0x%0lX. 
%s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } err = WaitForSingleObject(proc.hProcess, INFINITE); - if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: %s: WaitForSingleObject() -> %d %s:%d\n", strrchr(__FILE__,'/')+1, err, __FILE__, __LINE__); + if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %d. %s:%d\n", err, strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } err = 0; endFn: diff --git a/src/main/c/postshit/launch/mvn/windoof.h b/src/main/c/postshit/launch/mvn/windoof.h deleted file mode 100644 index b5b93fa..0000000 --- a/src/main/c/postshit/launch/mvn/windoof.h +++ /dev/null @@ -1,79 +0,0 @@ -#ifndef INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA -#define INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA - - -#define assert(expr) do{if(!(expr)){fprintf(stderr,"assert(%s) %s:%d\n", #expr, __FILE__, __LINE__);}}while(0) - -#define NULL ((void*)0) - -#define INT_MAX ((int)0x7FFFFFFF) - -#define ENOBUFS 119 -#define ENOTSUP 129 - -#define CREATE_NO_WINDOW 0x08000000 - -#define BELOW_NORMAL_PRIORITY_CLASS 0x00004000 - - -typedef struct { - int cb; - char* lpReserved; - char* lpDesktop; - char* lpTitle; - int dwX; - int dwY; - int dwXSize; - int dwYSize; - int dwXCountChars; - int dwYCountChars; - int dwFillAttribute; - int dwFlags; - short wShowWindow; - short cbReserved2; - void* lpReserved2; - void* hStdInput; - void* hStdOutput; - void* hStdError; -} STARTUPINFOA; - - -typedef struct { - void* hProcess; - void* hThread; - int dwProcessId; - int dwThreadId; -} PROCESS_INFORMATION; - - -typedef struct { - int nLength; - void* lpSecurityDescriptor; - int bInheritHandle; -} SECURITY_ATTRIBUTES; - - -long unsigned GetLastError(void); - -int fprintf(struct _iobuf*, const char *, ...); - -long long unsigned strlen(const char*); - -char *strrchr(const char *s, int c); - -int CreateProcessA( - const char* lpApplicationName, - char* lpCommandLine, - SECURITY_ATTRIBUTES* lpProcessAttributes, - SECURITY_ATTRIBUTES* lpThreadAttributes, - int bInheritHandles, - int dwCreationFlags, - void* lpEnvironment, - const char* lpCurrentDirectory, - STARTUPINFOA* lpStartupInfo, - PROCESS_INFORMATION* lpProcessInformation -); - - - -#endif /* INCGUARD_8WICAEpuAgDVeQIAui8CAEFpAgBSJQIA */ -- cgit v1.1 From 9f76596bb08d41131ec764841e347cf567e43bd0 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 12:20:10 +0100 Subject: mvn-launcher.c: Add help. Fix bug. Cleanup. --- src/main/c/postshit/launch/mvn/mvn-launch.c | 46 ++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c index 5187b10..b7f221c 100644 --- a/src/main/c/postshit/launch/mvn/mvn-launch.c +++ b/src/main/c/postshit/launch/mvn/mvn-launch.c @@ -7,6 +7,7 @@ ${CC:?} -o build/bin/mvn-launch.exe \ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \ + -DPROJECT_VERSION=0.0.0-$(date -u +%s) \ src/main/c/postshit/launch/mvn/mvn-launch.c \ -Isrc/main/c/postshit/launch/mvn \ @@ -19,6 +20,9 @@ #define LOGERR(...) fprintf(stderr, __VA_ARGS__) #define LOGDBG(...) 
fprintf(stderr, __VA_ARGS__) +#define STR_QUOT_3q9o58uhzjad(s) #s +#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s) + static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){ #define dst_len (*dst_len) @@ -107,6 +111,18 @@ endFn: int main( int argc, char**argv ){ register int err; + char tmp[2]; + err = GetEnvironmentVariable("LAUNCHR_HELP", tmp, 1); + if( err == 0 ){ + if( GetLastError() != ERROR_ENVVAR_NOT_FOUND ){ + LOGERR("ERROR: GetEnvironmentVariable(LAUNCHR_HELP): %lu. %s:%d\n", GetLastError(), __FILE__, __LINE__); + err = -1; goto endFn; } + /*no such variable. interpret as no-help-wanted*/; + }else{ + printf("\n %s " STR_QUOT(PROJECT_VERSION) "\n \n Delegates the call to maven without 'cmd' files.\n\n", strrchr(__FILE__,'/')+1); + err = -1; goto endFn; + } + char username[16]; const int username_cap = sizeof username; err = GetEnvironmentVariable("USERNAME", username, username_cap); @@ -131,7 +147,7 @@ int main( int argc, char**argv ){ || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_OPTS") < 0 || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_DEBUG_OPTS") < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -classpath", 11) < 0 - || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/", 9) < 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/", 10) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 45) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/", 29) < 0 @@ -143,21 +159,22 @@ int main( int argc, char**argv ){ ; if( err ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; } - char tmpBuf[0x7FFF]; - const int tmpBuf_cap = sizeof tmpBuf; - err = GetCurrentDirectory(tmpBuf_cap, tmpBuf); + char workDir[0x7FFF]; + const int workDir_cap = sizeof workDir; + err = GetCurrentDirectory(workDir_cap, workDir); if( err == 0 ){ LOGERR("ERROR: GetCurrentDirectory() -> 0x%lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } - if( err >= tmpBuf_cap ){ + if( err >= workDir_cap ){ LOGERR("ENOBUFS: Working dir too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__); err = -ENOBUFS; goto endFn; } assert(err > 0); - const int tmpBuf_len = err; + const int workDir_len = err; + for( err = 0 ; err < workDir_len ; ++err ){ if( workDir[err] == '\\' ) workDir[err] = '/'; } err = 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " \"-Dmaven.multiModuleProjectDirectory=", 38) < 0 - || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, tmpBuf, tmpBuf_len) < 0 + || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, workDir, workDir_len) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " org.codehaus.plexus.classworlds.launcher.Launcher", 50) < 0 ; @@ -175,17 +192,24 @@ int main( int argc, char**argv ){ PROCESS_INFORMATION proc; err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc); if( err == 0 ){ - LOGERR("[DEBUG] CMDLINE: %.*s\n", cmdline_len, cmdline); LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } err = WaitForSingleObject(proc.hProcess, INFINITE); - if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %d. 
%s:%d\n", err, strrchr(__FILE__,'/')+1, __LINE__); + if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); err = -1; goto endFn; } - err = 0; + long unsigned exitCode; + err = GetExitCodeProcess(proc.hProcess, &exitCode); + if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; } + if( (exitCode & 0x7FFFFFFF) != exitCode ){ + LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; + } + err = exitCode; endFn: + if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); } if( err < 0 ) err = -err; - if( err > 0x7F ) err = 1; return err; } -- cgit v1.1 From 1c9daeb1891b9a583047f2fe7314a265df357ed8 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 15:07:59 +0100 Subject: mvn-versions-set.c: Impl this one in C --- src/main/c/postshit/launch/mvn/mvn-launch.c | 3 +- src/main/c/postshit/launch/mvn/mvn-versions-set.c | 133 ++++++++++++++++++++++ 2 files changed, 134 insertions(+), 2 deletions(-) create mode 100644 src/main/c/postshit/launch/mvn/mvn-versions-set.c diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c index b7f221c..8886e9e 100644 --- a/src/main/c/postshit/launch/mvn/mvn-launch.c +++ b/src/main/c/postshit/launch/mvn/mvn-launch.c @@ -9,7 +9,6 @@ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \ -DPROJECT_VERSION=0.0.0-$(date -u +%s) \ src/main/c/postshit/launch/mvn/mvn-launch.c \ - -Isrc/main/c/postshit/launch/mvn \ */ @@ -178,7 +177,7 @@ int main( int argc, char**argv ){ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0 || appendRaw(cmdline, &cmdline_len, cmdline_cap, " org.codehaus.plexus.classworlds.launcher.Launcher", 50) < 0 ; - if( err ){ LOGDBG("[TRACE] %s:%d", __FILE__, __LINE__); err = -1; goto endFn; } + if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; } /*append all other args*/ for( int iA=1 ; iA < argc ; ++iA ){ diff --git a/src/main/c/postshit/launch/mvn/mvn-versions-set.c b/src/main/c/postshit/launch/mvn/mvn-versions-set.c new file mode 100644 index 0000000..888183d --- /dev/null +++ b/src/main/c/postshit/launch/mvn/mvn-versions-set.c @@ -0,0 +1,133 @@ +/* + + Shitty policies require shitty workarounds. Standard maven ships with a 'cmd' + file for its execution. But as some shiny 'security' policies forbid + execution of 'cmd' files, we need to waste our time writing stuff like this + instead doing our work. Grrr... + + ${CC:?} -o build/bin/mvn-versions-set.exe \ + -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \ + -DPROJECT_VERSION=0.0.0-$(date -u +%s) \ + src/main/c/postshit/launch/mvn/mvn-versions-set.c \ + +*/ + +#include +#include +#include + +#define LOGERR(...) fprintf(stderr, __VA_ARGS__) +#define LOGDBG(...) 
fprintf(stderr, __VA_ARGS__) + +#define STR_QUOT_3q9o58uhzjad(s) #s +#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s) + + +static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){ + #define dst_len (*dst_len) + register int err; + if( dst_cap < dst_len + src_len ){ + LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src); + err = -ENOBUFS; goto endFn; + } + memcpy(dst + dst_len, src, src_len); + dst_len += src_len; + err = 0; +endFn: + return err; + #undef dst_len +} + + +static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){ + #define dst_len (*dst_len) + register int err; + if( dst_cap < dst_len + src_len ){ + LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src); + err = -ENOBUFS; goto endFn; + } + for( err = 0 ; err < src_len ; ++err ){ + if( src[err] == '"' ){ + LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__); + err = -ENOTSUP; goto endFn; + } + dst[dst_len++] = src[err]; + } + err = 0; +endFn: + return err; + #undef dst_len +} + + +int main( int argc, char**argv ){ + register int err; + int isHelp = 0; + const char *newVersion = NULL; + + /*parse args*/ + for( err = 1 ; err < argc ; ++err ){ + const char *arg = argv[err]; + if( !strcmp(arg, "--help") ){ + isHelp = !0; break; + }else if( newVersion == NULL ){ + newVersion = arg; + }else{ + LOGERR("EINVAL: Only ONE arg expected. But got: %s\n", arg); err = -1; goto endFn; + } + } + if( isHelp ){ + printf("\n" + " %s " STR_QUOT(PROJECT_VERSION) "\n" + " \n" + " Set a specific maven version. Usage:\n" + " \n" + " %s 0.0.0-SNAPSHOT\n" + "\n", strrchr(__FILE__,'/')+1, argv[0]); + err = -1; goto endFn; + } + if( newVersion == NULL ){ + LOGERR("EINVAL: new version to use missing. Try --help\n"); + err = -1; goto endFn; + } + const int newVersion_len = strlen(newVersion); + + char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/ + cmdline[0] = '\0'; + const int cmdline_cap = sizeof cmdline; + int cmdline_len = 0; + + err = 0 + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "mvn versions:set -DgenerateBackupPoms=false \"-DnewVersion=", 58) < 0 + || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, newVersion, newVersion_len) + || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0 + ; + if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; } + + STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, }; + startInfo.cb = sizeof(startInfo); + PROCESS_INFORMATION proc; + err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc); + if( err == 0 ){ + LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; + } + err = WaitForSingleObject(proc.hProcess, INFINITE); + if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; } + long unsigned exitCode; + err = GetExitCodeProcess(proc.hProcess, &exitCode); + if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; } + if( (exitCode & 0x7FFFFFFF) != exitCode ){ + LOGERR("EDOM: Exit code %lu out of bounds. 
%s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__); + err = -1; goto endFn; + } + err = exitCode; +endFn: + if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); } + if( err < 0 ) err = -err; + return err; +} + + -- cgit v1.1 From 679d534c67013c547be5f2e16681cf92b4f3ff73 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 16:42:25 +0100 Subject: Update houston patch --- src/main/patch/houston/default.patch | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch index 4fcfa8c..52017d2 100644 --- a/src/main/patch/houston/default.patch +++ b/src/main/patch/houston/default.patch @@ -2,22 +2,22 @@ Custom houston patch to have a "usable" service at all. Patch based on "develop" aka - "3b1275e123c2b7aa2ffaa34270a5e1a373a65993" from "2023-04-27". + "125344e940ebc090183bad7fc096938289f15e3f" from "2024-01-16". diff --git a/pom.xml b/pom.xml index 0ed4f7f3..b44c5693 100644 --- a/pom.xml +++ b/pom.xml -@@ -72,8 +72,6 @@ - false +@@ -73,7 +73,7 @@ false -- + - compile - ++ none - + + 9.4.43.v20210629 diff --git a/houston-process/pom.xml b/houston-process/pom.xml index 374dcb97..3c24937c 100644 --- a/houston-process/pom.xml @@ -69,9 +69,9 @@ index 432efb01..d1729fe9 100644 qc.add( - new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays)); + new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/)); - - RedisquesConfiguration redisquesConfig = - RedisquesConfiguration.with() + LOGGER.info( + "Redisques redis-client will created with MaxPoolSize: {}, MaxPoolWaiting: {}, MaxWaitingHandlers: {}", + Props.getMaxRedisConnectionPoolSize4RedisQues(), diff --git a/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java new file mode 100644 index 00000000..aa3aa2e0 -- cgit v1.1 From 9bfc439cfb350a8677f0d4f1a2be3686912f97bb Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 17 Jan 2024 19:03:28 +0100 Subject: Nothing special. --- doc/note/links/links.txt | 1 + doc/note/maven/maven.txt | 2 +- src/main/lua/paisa-logs/DigHoustonLogs.lua | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 4bb0c5c..8f7462c 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -261,6 +261,7 @@ Links (Aka argument amplifiers) - [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) - [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551) - [Just one message per minute](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/375/overview?commentId=330543) +- [Houston down readyness probe timeout](https://wikit.post.ch/x/koO0Vg) ## Errorhandling is not needed ... 
- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526) diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index eb55ba0..02517a8 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -29,6 +29,6 @@ mvn verify -U -DSelBaseUrl=http://localhost:7012/apigateway/services/foo/index.h mvn dependency:go-offline -(See als "https://maven.apache.org/plugins/maven-dependency-plugin/go-offline-mojo.html") +[See also](https://maven.apache.org/plugins/maven-dependency-plugin/go-offline-mojo.html) diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index ebe7afe..e217f82 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -35,8 +35,8 @@ function loadFilters( that ) assert(not that.filters) that.filters = { - { action = "drop", beforeDate = "2023-10-18 03:00:00.000", }, - { action = "drop", afterDate = "2024-01-31 23:59:59.999", }, +-- { action = "drop", beforeDate = "2023-10-18 03:00:00.000", }, +-- { action = "drop", afterDate = "2024-01-31 23:59:59.999", }, { action = "drop", level = "TRACE" }, { action = "drop", level = "DEBUG" }, -- cgit v1.1 From 3c37451a37878c7958825b38c7ade5839026a97d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 19 Jan 2024 16:09:21 +0100 Subject: Add doc for java/JVM opens UNNAMED module. --- doc/note/java/java.txt | 7 +++++++ doc/note/maven/maven.txt | 2 ++ 2 files changed, 9 insertions(+) create mode 100644 doc/note/java/java.txt diff --git a/doc/note/java/java.txt b/doc/note/java/java.txt new file mode 100644 index 0000000..8dc01a2 --- /dev/null +++ b/doc/note/java/java.txt @@ -0,0 +1,7 @@ + +Java / JVM +================ + + --add-opens java.base/java.lang=ALL-UNNAMED + + diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index 02517a8..cdfdd9a 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -12,6 +12,8 @@ mvn versions:update-parent -DparentVersion= mvn versions:set-property -Dproperty=foo.bar -DnewVersion=gugus +export MAVEN_OPTS="..." + ## Deploy paisa snapshot mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://artifactory.tools.pnet.ch/artifactory/libs-snapshot-local -- cgit v1.1 From 8bf929b59fb4688b58fdea59548a0cbe3cb55f6d Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sun, 21 Jan 2024 13:05:02 +0100 Subject: Add doc about expand expiry of gpg keys --- doc/note/gpg/gpg.txt | 18 ++++++++++++++++++ doc/note/mount/mount.txt | 8 ++++++++ 2 files changed, 26 insertions(+) create mode 100644 doc/note/mount/mount.txt diff --git a/doc/note/gpg/gpg.txt b/doc/note/gpg/gpg.txt index 0089221..5340945 100644 --- a/doc/note/gpg/gpg.txt +++ b/doc/note/gpg/gpg.txt @@ -67,6 +67,24 @@ you're doing! If you don't, you MUST NOT use those instructions! gpgwin --sign-key foreignUser@example.com +## Expand detached subkey expiry + + cd "${WORKDIR:?}" + mkdir master + (cd "${OFFHOME:?}/.gnupg" && tar c *) | (cd master && tar x) + export GNUPGHOME="${WORKDIR:?}/master" + gpg --list-secret-keys --with-keygrip --keyid-format=long --with-fingerprint --with-subkey-fingerprint --list-options show-unusable-subkeys + gpg --edit-key SSB_KEY + key 1 + key 2 + expire + save + gpg --export-secret-subkeys F00! BA5! > subkey-$(date -u +%Y%m%d).sec.gpg + gpg --export F00! BA5! 
> subkey-$(date -u +%Y%m%d).pub.gpg
+    tar --owner=0 --group=0 -c subkey-$(date -u +%Y%m%d).*.gpg | (cd "${OFFHOME:?}" && sudo tar x)
+    md5sum -b subkey-$(date -u +%Y%m%d).*.gpg | sudo tee -a "${OFFHOME:?}/MD5SUM"
+
+
 ## Use keys with throw-away keyring

     GNUPGHOME="/tmp/foo/"
diff --git a/doc/note/mount/mount.txt b/doc/note/mount/mount.txt
new file mode 100644
index 0000000..99e1521
--- /dev/null
+++ b/doc/note/mount/mount.txt
@@ -0,0 +1,8 @@
+
+## tmpfs / ramfs
+
+Beware: ramfs ignores the 'size' option (only tmpfs honors it), so the system may run OOM instead.
+
+    mount -t ramfs -o size=1G ramfs /mnt/ramfs
+    chown -R $(whoami):$(whoami) /mnt/ramfs
+
--
cgit v1.1


From 5392ae098a14a82f339758ae997b9e0aa9d81a96 Mon Sep 17 00:00:00 2001
From: andreas tux-book
Date: Sun, 21 Jan 2024 14:10:53 +0100
Subject: Add more notes to gpg

---
 doc/note/gpg/gpg.txt | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/doc/note/gpg/gpg.txt b/doc/note/gpg/gpg.txt
index 5340945..5580a13 100644
--- a/doc/note/gpg/gpg.txt
+++ b/doc/note/gpg/gpg.txt
@@ -69,20 +69,32 @@
 ## Expand detached subkey expiry

+PS: Why is this so fu***** damn complicated! Anyone still wondering why this
+    system is not used by most humans?!? Please STOP producing so uselessly
+    complicated software!
+
     cd "${WORKDIR:?}"
     mkdir master
-    (cd "${OFFHOME:?}/.gnupg" && tar c *) | (cd master && tar x)
+    (cd "${OFFHOME:?}/.gnupg" && tar c $(ls -A)) | (cd master && tar x)
     export GNUPGHOME="${WORKDIR:?}/master"
     gpg --list-secret-keys --with-keygrip --keyid-format=long --with-fingerprint --with-subkey-fingerprint --list-options show-unusable-subkeys
+    gpg --edit-key SEC_KEY
+    expire
+    save
     gpg --edit-key SSB_KEY
     key 1
     key 2
     expire
     save
-    gpg --export-secret-subkeys F00! BA5! > subkey-$(date -u +%Y%m%d).sec.gpg
-    gpg --export F00! BA5! > subkey-$(date -u +%Y%m%d).pub.gpg
+    NOW=$(date -u +%Y%m%d-%H%MZ)
+    gpg --export-secret-subkeys F00! BA5! > subkey-${NOW:?}.sec.gpg
+    gpg --export F00! BA5! 
> subkey-${NOW:?}.pub.gpg + gpg --list-packets subkey-${NOW:?}.sec.gpg + gpg --list-packets subkey-${NOW:?}.pub.gpg + tar --owner=0 --group=0 -c subkey-${NOW:?}.*.gpg | (cd "${OFFHOME:?}" && sudo tar x) + md5sum -b subkey-${NOW:?}.*.gpg | sudo tee -a "${OFFHOME:?}/MD5SUM" + echo "After import, you'd likely want to change phrase away from master" + gpg --edit-key foo@example.com passwd quit ## Use keys with throw-away keyring -- cgit v1.1 From a76939d07f8072f2f33930127ad463df4a437433 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 23 Jan 2024 16:50:59 +0100 Subject: Some kludge code to debug gateleen --- .../DelegateHttpServerRequest.java | 394 ++++++++++++++++++++ .../DelegateVertxHttpServerRequestInternal.java | 408 +++++++++++++++++++++ .../DelegateVertxHttpServerResponse.java | 105 ++++++ .../gateleenKludge/tmoutissue20240123/Foo.java | 129 +++++++ 4 files changed, 1036 insertions(+) create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java new file mode 100644 index 0000000..aa4ad48 --- /dev/null +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java @@ -0,0 +1,394 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123; + +import io.netty.handler.codec.DecoderResult; +import io.vertx.core.AsyncResult; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.MultiMap; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.http.Cookie; +import io.vertx.core.http.HttpConnection; +import io.vertx.core.http.HttpFrame; +import io.vertx.core.http.HttpMethod; +import io.vertx.core.http.HttpServerFileUpload; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.http.HttpServerResponse; +import io.vertx.core.http.HttpVersion; +import io.vertx.core.http.ServerWebSocket; +import io.vertx.core.http.StreamPriority; +import io.vertx.core.net.NetSocket; +import io.vertx.core.net.SocketAddress; +import io.vertx.core.streams.Pipe; +import io.vertx.core.streams.WriteStream; + +import javax.net.ssl.SSLPeerUnverifiedException; +import javax.net.ssl.SSLSession; +import javax.security.cert.X509Certificate; +import java.util.Map; +import java.util.Set; + +public class DelegateHttpServerRequest implements io.vertx.core.http.HttpServerRequest { + + private final io.vertx.core.http.HttpServerRequest delegate; + private final boolean isDebugging = true; + + public DelegateHttpServerRequest(HttpServerRequest delegate) { + this.delegate = delegate; + } + + private void breakpoint(){ + try{ + throw new UnsupportedOperationException(); + }catch(UnsupportedOperationException ex){} + } + + @Override + public HttpServerRequest exceptionHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.exceptionHandler(handler); + } + + @Override + public 
HttpServerRequest handler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.handler(handler); + } + + @Override + public HttpServerRequest pause() { + if( isDebugging ) breakpoint(); + return delegate.pause(); + } + + @Override + public HttpServerRequest resume() { + if( isDebugging ) breakpoint(); + return delegate.resume(); + } + + @Override + public HttpServerRequest fetch(long amount) { + if( isDebugging ) breakpoint(); + return delegate.fetch(amount); + } + + @Override + public HttpServerRequest endHandler(Handler endHandler) { + if( isDebugging ) breakpoint(); + return delegate.endHandler(endHandler); + } + + @Override + public HttpVersion version() { + if( isDebugging ) breakpoint(); + return delegate.version(); + } + + @Override + public HttpMethod method() { + if( isDebugging ) breakpoint(); + return delegate.method(); + } + + @Override + public boolean isSSL() { + if( isDebugging ) breakpoint(); + return delegate.isSSL(); + } + + @Override + public String scheme() { + if( isDebugging ) breakpoint(); + return delegate.scheme(); + } + + @Override + public String uri() { + if( isDebugging ) breakpoint(); + return delegate.uri(); + } + + @Override + public String path() { + if( isDebugging ) breakpoint(); + return delegate.path(); + } + + @Override + public String query() { + if( isDebugging ) breakpoint(); + return delegate.query(); + } + + @Override + public String host() { + if( isDebugging ) breakpoint(); + return delegate.host(); + } + + @Override + public long bytesRead() { + if( isDebugging ) breakpoint(); + return delegate.bytesRead(); + } + + @Override + public HttpServerResponse response() { + if( isDebugging ) breakpoint(); + return delegate.response(); + } + + @Override + public MultiMap headers() { + if( isDebugging ) breakpoint(); + return delegate.headers(); + } + + @Override + public String getHeader(String headerName) { + if( isDebugging ) breakpoint(); + return delegate.getHeader(headerName); + } + + @Override + public String getHeader(CharSequence headerName) { + if( isDebugging ) breakpoint(); + return delegate.getHeader(headerName); + } + + @Override + public MultiMap params() { + if( isDebugging ) breakpoint(); + return delegate.params(); + } + + @Override + public String getParam(String paramName) { + if( isDebugging ) breakpoint(); + return delegate.getParam(paramName); + } + + @Override + public String getParam(String paramName, String defaultValue) { + if( isDebugging ) breakpoint(); + return delegate.getParam(paramName, defaultValue); + } + + @Override + public SocketAddress remoteAddress() { + if( isDebugging ) breakpoint(); + return delegate.remoteAddress(); + } + + @Override + public SocketAddress localAddress() { + if( isDebugging ) breakpoint(); + return delegate.localAddress(); + } + + @Override + public SSLSession sslSession() { + if( isDebugging ) breakpoint(); + return delegate.sslSession(); + } + + @Override + public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException { + if( isDebugging ) breakpoint(); + return delegate.peerCertificateChain(); + } + + @Override + public String absoluteURI() { + if( isDebugging ) breakpoint(); + return delegate.absoluteURI(); + } + + @Override + public HttpServerRequest bodyHandler(Handler bodyHandler) { + if( isDebugging ) breakpoint(); + return delegate.bodyHandler(bodyHandler); + } + + @Override + public HttpServerRequest body(Handler> handler) { + if( isDebugging ) breakpoint(); + return delegate.body(handler); + } + + @Override + public Future body() { + if( 
isDebugging ) breakpoint(); + return delegate.body(); + } + + @Override + public void end(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.end(handler); + } + + @Override + public Future end() { + if( isDebugging ) breakpoint(); + return delegate.end(); + } + + @Override + public void toNetSocket(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.toNetSocket(handler); + } + + @Override + public Future toNetSocket() { + if( isDebugging ) breakpoint(); + return delegate.toNetSocket(); + } + + @Override + public HttpServerRequest setExpectMultipart(boolean expect) { + if( isDebugging ) breakpoint(); + return delegate.setExpectMultipart(expect); + } + + @Override + public boolean isExpectMultipart() { + if( isDebugging ) breakpoint(); + return delegate.isExpectMultipart(); + } + + @Override + public HttpServerRequest uploadHandler(Handler uploadHandler) { + if( isDebugging ) breakpoint(); + return delegate.uploadHandler(uploadHandler); + } + + @Override + public MultiMap formAttributes() { + if( isDebugging ) breakpoint(); + return delegate.formAttributes(); + } + + @Override + public String getFormAttribute(String attributeName) { + if( isDebugging ) breakpoint(); + return delegate.getFormAttribute(attributeName); + } + + @Override + public int streamId() { + if( isDebugging ) breakpoint(); + return delegate.streamId(); + } + + @Override + public void toWebSocket(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.toWebSocket(handler); + } + + @Override + public Future toWebSocket() { + if( isDebugging ) breakpoint(); + return delegate.toWebSocket(); + } + + @Override + public boolean isEnded() { + if( isDebugging ) breakpoint(); + return delegate.isEnded(); + } + + @Override + public HttpServerRequest customFrameHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.customFrameHandler(handler); + } + + @Override + public HttpConnection connection() { + if( isDebugging ) breakpoint(); + return delegate.connection(); + } + + @Override + public StreamPriority streamPriority() { + if( isDebugging ) breakpoint(); + return delegate.streamPriority(); + } + + @Override + public HttpServerRequest streamPriorityHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.streamPriorityHandler(handler); + } + + @Override + public DecoderResult decoderResult() { + if( isDebugging ) breakpoint(); + return delegate.decoderResult(); + } + + @Override + public Cookie getCookie(String name) { + if( isDebugging ) breakpoint(); + return delegate.getCookie(name); + } + + @Override + public Cookie getCookie(String name, String domain, String path) { + if( isDebugging ) breakpoint(); + return delegate.getCookie(name, domain, path); + } + + @Override + public int cookieCount() { + if( isDebugging ) breakpoint(); + return delegate.cookieCount(); + } + + @Override + @Deprecated + public Map cookieMap() { + if( isDebugging ) breakpoint(); + return delegate.cookieMap(); + } + + @Override + public Set cookies(String name) { + if( isDebugging ) breakpoint(); + return delegate.cookies(name); + } + + @Override + public Set cookies() { + if( isDebugging ) breakpoint(); + return delegate.cookies(); + } + + @Override + public HttpServerRequest routed(String route) { + if( isDebugging ) breakpoint(); + return delegate.routed(route); + } + + @Override + public Pipe pipe() { + if( isDebugging ) breakpoint(); + return delegate.pipe(); + } + + @Override + public Future pipeTo(WriteStream dst) { + if( isDebugging ) breakpoint(); + return 
delegate.pipeTo(dst); + } + + @Override + public void pipeTo(WriteStream dst, Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.pipeTo(dst, handler); + } + +} diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java new file mode 100644 index 0000000..16c7259 --- /dev/null +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java @@ -0,0 +1,408 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123; + +import io.netty.handler.codec.DecoderResult; +import io.vertx.core.AsyncResult; +import io.vertx.core.Context; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.MultiMap; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.http.Cookie; +import io.vertx.core.http.HttpConnection; +import io.vertx.core.http.HttpFrame; +import io.vertx.core.http.HttpMethod; +import io.vertx.core.http.HttpServerFileUpload; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.http.HttpServerResponse; +import io.vertx.core.http.HttpVersion; +import io.vertx.core.http.ServerWebSocket; +import io.vertx.core.http.StreamPriority; +import io.vertx.core.http.impl.HttpServerRequestInternal; +import io.vertx.core.net.NetSocket; +import io.vertx.core.net.SocketAddress; +import io.vertx.core.streams.Pipe; +import io.vertx.core.streams.WriteStream; + +import javax.net.ssl.SSLPeerUnverifiedException; +import javax.net.ssl.SSLSession; +import javax.security.cert.X509Certificate; +import java.util.Map; +import java.util.Set; + +public class DelegateVertxHttpServerRequestInternal implements HttpServerRequestInternal { + + private final HttpServerRequestInternal delegate; + private final boolean isDebugging = true; + + public DelegateVertxHttpServerRequestInternal(HttpServerRequest delegate) { + this.delegate = (HttpServerRequestInternal) delegate; + } + + private void breakpoint(){ + try{ + throw new UnsupportedOperationException(); + }catch(UnsupportedOperationException ex){} + } + + @Override + public HttpServerRequest exceptionHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.exceptionHandler(handler); + } + + @Override + public HttpServerRequest handler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.handler(handler); + } + + @Override + public HttpServerRequest pause() { + if( isDebugging ) breakpoint(); + return delegate.pause(); + } + + @Override + public HttpServerRequest resume() { + if( isDebugging ) breakpoint(); + return delegate.resume(); + } + + @Override + public HttpServerRequest fetch(long amount) { + if( isDebugging ) breakpoint(); + return delegate.fetch(amount); + } + + @Override + public HttpServerRequest endHandler(Handler endHandler) { + if( isDebugging ) breakpoint(); + return delegate.endHandler(endHandler); + } + + @Override + public HttpVersion version() { + if( isDebugging ) breakpoint(); + return delegate.version(); + } + + @Override + public HttpMethod method() { + if( isDebugging ) breakpoint(); + return delegate.method(); + } + + @Override + public boolean isSSL() { + if( isDebugging ) breakpoint(); + return delegate.isSSL(); + } + + @Override + public String scheme() { + if( isDebugging ) breakpoint(); + return delegate.scheme(); + } + + @Override + public String uri() { + 
if( isDebugging ) breakpoint(); + return delegate.uri(); + } + + @Override + public String path() { + if( isDebugging ) breakpoint(); + return delegate.path(); + } + + @Override + public String query() { + if( isDebugging ) breakpoint(); + return delegate.query(); + } + + @Override + public String host() { + if( isDebugging ) breakpoint(); + return delegate.host(); + } + + @Override + public long bytesRead() { + if( isDebugging ) breakpoint(); + return delegate.bytesRead(); + } + + @Override + public HttpServerResponse response() { + if( isDebugging ) breakpoint(); + return delegate.response(); + } + + @Override + public MultiMap headers() { + if( isDebugging ) breakpoint(); + return delegate.headers(); + } + + @Override + public String getHeader(String headerName) { + if( isDebugging ) breakpoint(); + return delegate.getHeader(headerName); + } + + @Override + public String getHeader(CharSequence headerName) { + if( isDebugging ) breakpoint(); + return delegate.getHeader(headerName); + } + + @Override + public MultiMap params() { + if( isDebugging ) breakpoint(); + return delegate.params(); + } + + @Override + public String getParam(String paramName) { + if( isDebugging ) breakpoint(); + return delegate.getParam(paramName); + } + + @Override + public String getParam(String paramName, String defaultValue) { + if( isDebugging ) breakpoint(); + return delegate.getParam(paramName, defaultValue); + } + + @Override + public SocketAddress remoteAddress() { + if( isDebugging ) breakpoint(); + return delegate.remoteAddress(); + } + + @Override + public SocketAddress localAddress() { + if( isDebugging ) breakpoint(); + return delegate.localAddress(); + } + + @Override + public SSLSession sslSession() { + if( isDebugging ) breakpoint(); + return delegate.sslSession(); + } + + @Override + public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException { + if( isDebugging ) breakpoint(); + return delegate.peerCertificateChain(); + } + + @Override + public String absoluteURI() { + if( isDebugging ) breakpoint(); + return delegate.absoluteURI(); + } + + @Override + public HttpServerRequest bodyHandler(Handler bodyHandler) { + if( isDebugging ) breakpoint(); + return delegate.bodyHandler(bodyHandler); + } + + @Override + public HttpServerRequest body(Handler> handler) { + if( isDebugging ) breakpoint(); + return delegate.body(handler); + } + + @Override + public Future body() { + if( isDebugging ) breakpoint(); + return delegate.body(); + } + + @Override + public void end(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.end(handler); + } + + @Override + public Future end() { + if( isDebugging ) breakpoint(); + return delegate.end(); + } + + @Override + public void toNetSocket(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.toNetSocket(handler); + } + + @Override + public Future toNetSocket() { + if( isDebugging ) breakpoint(); + return delegate.toNetSocket(); + } + + @Override + public HttpServerRequest setExpectMultipart(boolean expect) { + if( isDebugging ) breakpoint(); + return delegate.setExpectMultipart(expect); + } + + @Override + public boolean isExpectMultipart() { + if( isDebugging ) breakpoint(); + return delegate.isExpectMultipart(); + } + + @Override + public HttpServerRequest uploadHandler(Handler uploadHandler) { + if( isDebugging ) breakpoint(); + return delegate.uploadHandler(uploadHandler); + } + + @Override + public MultiMap formAttributes() { + if( isDebugging ) breakpoint(); + return delegate.formAttributes(); + } + + @Override + 
public String getFormAttribute(String attributeName) { + if( isDebugging ) breakpoint(); + return delegate.getFormAttribute(attributeName); + } + + @Override + public int streamId() { + if( isDebugging ) breakpoint(); + return delegate.streamId(); + } + + @Override + public void toWebSocket(Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.toWebSocket(handler); + } + + @Override + public Future toWebSocket() { + if( isDebugging ) breakpoint(); + return delegate.toWebSocket(); + } + + @Override + public boolean isEnded() { + if( isDebugging ) breakpoint(); + return delegate.isEnded(); + } + + @Override + public HttpServerRequest customFrameHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.customFrameHandler(handler); + } + + @Override + public HttpConnection connection() { + if( isDebugging ) breakpoint(); + return delegate.connection(); + } + + @Override + public StreamPriority streamPriority() { + if( isDebugging ) breakpoint(); + return delegate.streamPriority(); + } + + @Override + public HttpServerRequest streamPriorityHandler(Handler handler) { + if( isDebugging ) breakpoint(); + return delegate.streamPriorityHandler(handler); + } + + @Override + public DecoderResult decoderResult() { + if( isDebugging ) breakpoint(); + return delegate.decoderResult(); + } + + @Override + public Cookie getCookie(String name) { + if( isDebugging ) breakpoint(); + return delegate.getCookie(name); + } + + @Override + public Cookie getCookie(String name, String domain, String path) { + if( isDebugging ) breakpoint(); + return delegate.getCookie(name, domain, path); + } + + @Override + public int cookieCount() { + if( isDebugging ) breakpoint(); + return delegate.cookieCount(); + } + + @Override + @Deprecated + public Map cookieMap() { + if( isDebugging ) breakpoint(); + return delegate.cookieMap(); + } + + @Override + public Set cookies(String name) { + if( isDebugging ) breakpoint(); + return delegate.cookies(name); + } + + @Override + public Set cookies() { + if( isDebugging ) breakpoint(); + return delegate.cookies(); + } + + @Override + public HttpServerRequest routed(String route) { + if( isDebugging ) breakpoint(); + return delegate.routed(route); + } + + @Override + public Pipe pipe() { + if( isDebugging ) breakpoint(); + return delegate.pipe(); + } + + @Override + public Future pipeTo(WriteStream dst) { + if( isDebugging ) breakpoint(); + return delegate.pipeTo(dst); + } + + @Override + public void pipeTo(WriteStream dst, Handler> handler) { + if( isDebugging ) breakpoint(); + delegate.pipeTo(dst, handler); + } + + @Override + public Context context() { + if( isDebugging ) breakpoint(); + return delegate.context(); + } + + @Override + public Object metric() { + if( isDebugging ) breakpoint(); + return delegate.metric(); + } + +} diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java new file mode 100644 index 0000000..a13a8e2 --- /dev/null +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java @@ -0,0 +1,105 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123; + +import io.vertx.core.AsyncResult; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.MultiMap; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.http.Cookie; 
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.streams.ReadStream;
+
+import java.util.Set;
+
+public class DelegateVertxHttpServerResponse implements HttpServerResponse {
+
+    private final HttpServerResponse delegate;
+
+    public DelegateVertxHttpServerResponse(HttpServerResponse delegate) {
+        this.delegate = delegate;
+    }
+
+    @Override public HttpServerResponse exceptionHandler(Handler<Throwable> handler) { return delegate.exceptionHandler(handler); }
+    @Override public HttpServerResponse setWriteQueueMaxSize(int maxSize) { return delegate.setWriteQueueMaxSize(maxSize); }
+    @Override public HttpServerResponse drainHandler(Handler<Void> handler) { return delegate.drainHandler(handler); }
+    @Override public int getStatusCode() { return delegate.getStatusCode(); }
+    @Override public HttpServerResponse setStatusCode(int statusCode) { return delegate.setStatusCode(statusCode); }
+    @Override public String getStatusMessage() { return delegate.getStatusMessage(); }
+    @Override public HttpServerResponse setStatusMessage(String statusMessage) { return delegate.setStatusMessage(statusMessage); }
+    @Override public HttpServerResponse setChunked(boolean chunked) { return delegate.setChunked(chunked); }
+    @Override public boolean isChunked() { return delegate.isChunked(); }
+    @Override public MultiMap headers() { return delegate.headers(); }
+    @Override public HttpServerResponse putHeader(String name, String value) { return delegate.putHeader(name, value); }
+    @Override public HttpServerResponse putHeader(CharSequence name, CharSequence value) { return delegate.putHeader(name, value); }
+    @Override public HttpServerResponse putHeader(String name, Iterable<String> values) { return delegate.putHeader(name, values); }
+    @Override public HttpServerResponse putHeader(CharSequence name, Iterable<CharSequence> values) { return delegate.putHeader(name, values); }
+    @Override public MultiMap trailers() { return delegate.trailers(); }
+    @Override public HttpServerResponse putTrailer(String name, String value) { return delegate.putTrailer(name, value); }
+    @Override public HttpServerResponse putTrailer(CharSequence name, CharSequence value) { return delegate.putTrailer(name, value); }
+    @Override public HttpServerResponse putTrailer(String name, Iterable<String> values) { return delegate.putTrailer(name, values); }
+    @Override public HttpServerResponse putTrailer(CharSequence name, Iterable<CharSequence> value) { return delegate.putTrailer(name, value); }
+    @Override public HttpServerResponse closeHandler(Handler<Void> handler) { return delegate.closeHandler(handler); }
+    @Override public HttpServerResponse endHandler(Handler<Void> handler) { return delegate.endHandler(handler); }
+    @Override public Future<Void> write(String chunk, String enc) { return delegate.write(chunk, enc); }
+    @Override public void write(String chunk, String enc, Handler<AsyncResult<Void>> handler) { delegate.write(chunk, enc, handler); }
+    @Override public Future<Void> write(String chunk) { return delegate.write(chunk); }
+    @Override public void write(String chunk, Handler<AsyncResult<Void>> handler) { delegate.write(chunk, handler); }
+    @Override public HttpServerResponse writeContinue() { return delegate.writeContinue(); }
+    @Override public Future<Void> end(String chunk) { return delegate.end(chunk); }
+    @Override public void end(String chunk, Handler<AsyncResult<Void>> handler) { delegate.end(chunk, handler); }
+    @Override public Future<Void> end(String chunk, String enc) { return delegate.end(chunk, enc); }
+    @Override public void end(String chunk, String enc, Handler<AsyncResult<Void>> handler) { delegate.end(chunk, enc, handler); }
+    @Override public Future<Void> end(Buffer chunk) { return delegate.end(chunk); }
+    @Override public void end(Buffer chunk, Handler<AsyncResult<Void>> handler) { delegate.end(chunk, handler); }
+    @Override public Future<Void> end() { return delegate.end(); }
+    @Override public void send(Handler<AsyncResult<Void>> handler) { delegate.send(handler); }
+    @Override public Future<Void> send() { return delegate.send(); }
+    @Override public void send(String body, Handler<AsyncResult<Void>> handler) { delegate.send(body, handler); }
+    @Override public Future<Void> send(String body) { return delegate.send(body); }
+    @Override public void send(Buffer body, Handler<AsyncResult<Void>> handler) { delegate.send(body, handler); }
+    @Override public Future<Void> send(Buffer body) { return delegate.send(body); }
+    @Override public void send(ReadStream<Buffer> body, Handler<AsyncResult<Void>> handler) { delegate.send(body, handler); }
+    @Override public Future<Void> send(ReadStream<Buffer> body) { return delegate.send(body); }
+    @Override public Future<Void> sendFile(String filename) { return delegate.sendFile(filename); }
+    @Override public Future<Void> sendFile(String filename, long offset) { return delegate.sendFile(filename, offset); }
+    @Override public Future<Void> sendFile(String filename, long offset, long length) { return delegate.sendFile(filename, offset, length); }
+    @Override public HttpServerResponse sendFile(String filename, Handler<AsyncResult<Void>> resultHandler) { return delegate.sendFile(filename, resultHandler); }
+    @Override public HttpServerResponse sendFile(String filename, long offset, Handler<AsyncResult<Void>> resultHandler) { return delegate.sendFile(filename, offset, resultHandler); }
+    @Override public HttpServerResponse sendFile(String filename, long offset, long length, Handler<AsyncResult<Void>> resultHandler) { return delegate.sendFile(filename, offset, length, resultHandler); }
+    @Override public void close() { delegate.close(); }
+    @Override public boolean ended() { return delegate.ended(); }
+    @Override public boolean closed() { return delegate.closed(); }
+    @Override public boolean headWritten() { return delegate.headWritten(); }
+    @Override public HttpServerResponse headersEndHandler(Handler<Void> handler) { return delegate.headersEndHandler(handler); }
+    @Override public HttpServerResponse bodyEndHandler(Handler<Void> handler) { return delegate.bodyEndHandler(handler); }
+    @Override public long bytesWritten() { return delegate.bytesWritten(); }
+    @Override public int streamId() { return delegate.streamId(); }
+    @Override public HttpServerResponse push(HttpMethod method, String host, String path, Handler<AsyncResult<HttpServerResponse>> handler) { return delegate.push(method, host, path, handler); }
+    @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path) { return delegate.push(method, host, path); }
+    @Override public HttpServerResponse push(HttpMethod method, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { return delegate.push(method, path, headers, handler); }
+    @Override public Future<HttpServerResponse> push(HttpMethod method, String path, MultiMap headers) { return delegate.push(method, path, headers); }
+    @Override public HttpServerResponse push(HttpMethod method, String path, Handler<AsyncResult<HttpServerResponse>> handler) { return delegate.push(method, path, handler); }
+    @Override public Future<HttpServerResponse> push(HttpMethod method, String path) { return delegate.push(method, path); }
+    @Override public HttpServerResponse push(HttpMethod method, String host, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { return delegate.push(method, host, path, headers, handler); }
+    @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path, MultiMap headers) { return delegate.push(method, host, path, headers); }
+    @Override public boolean reset() { return delegate.reset(); }
+    @Override public boolean reset(long code) { return delegate.reset(code); }
+    @Override public HttpServerResponse writeCustomFrame(int type, int flags, Buffer payload) { return delegate.writeCustomFrame(type, flags, payload); }
+    @Override public HttpServerResponse writeCustomFrame(HttpFrame frame) { return delegate.writeCustomFrame(frame); }
+    @Override public HttpServerResponse setStreamPriority(StreamPriority streamPriority) { return delegate.setStreamPriority(streamPriority); }
+    @Override public HttpServerResponse addCookie(Cookie cookie) { return delegate.addCookie(cookie); }
+    @Override public Cookie removeCookie(String name) { return delegate.removeCookie(name); }
+    @Override public Cookie removeCookie(String name, boolean invalidate) { return delegate.removeCookie(name, invalidate); }
+    @Override public Set<Cookie> removeCookies(String name) { return delegate.removeCookies(name); }
+    @Override public Set<Cookie> removeCookies(String name, boolean invalidate) { return delegate.removeCookies(name, invalidate); }
+    @Override public Cookie removeCookie(String name, String domain, String path) { return delegate.removeCookie(name, domain, path); }
+    @Override public Cookie removeCookie(String name, String domain, String path, boolean invalidate) { return delegate.removeCookie(name, domain, path, invalidate); }
+    @Override public Future<Void> write(Buffer data) { return delegate.write(data); }
+    @Override public void write(Buffer data, Handler<AsyncResult<Void>> handler) { delegate.write(data, handler); }
+    @Override public void end(Handler<AsyncResult<Void>> handler) { delegate.end(handler); }
+    @Override public boolean writeQueueFull() { return delegate.writeQueueFull(); }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
new file mode 100644
index 0000000..4eb1e20
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
@@ -0,0 +1,129 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.ext.web.RoutingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class Foo {
+
+    private static final Logger log = Foo.getLogger(Foo.class);
+    private static final boolean assertRequestEquality = true;
+    private static HttpServerRequest serverInfoRequest;
+    private static io.vertx.core.http.impl.HttpServerRequestInternal restStorageEvBusAdaptMappdHttpServReq;
+    private static long onBeginRouteEpochMs;
+
+    public static synchronized void onNewServerInfoRequst(HttpServerRequest request){
+        if( !isServerInfoRequst(request) ) return;
+        //assert serverInfoRequest == null;
+        log.trace("onNewServerInfoRequst()");
+        serverInfoRequest = request;
+    }
+
+    public static void downReqBegin(HttpServerRequest req) {
+        if( !isServerInfoRequst(req) ) return;
+        log.trace("downReqBegin()");
+        assert !assertRequestEquality || serverInfoRequest == req;
+    }
+
+    public static void downReqAuthorized(HttpServerRequest req) {
+        if( !isServerInfoRequst(req) ) return;
+        log.trace("downReqAuthorized()");
+        assert !assertRequestEquality || serverInfoRequest == req;
+    }
+
+    public static void onBeforeMainVerticleRouteGeneric(HttpServerRequest req) {
+        if( !isServerInfoRequst(req) ) return;
+        log.trace("onBeforeMainVerticleRouteGeneric()");
+        onBeginRouteEpochMs = System.currentTimeMillis();
+        assert !assertRequestEquality || serverInfoRequest == req;
+    }
+
+    public static Logger getLogger(Class clazz) {
+        assert clazz != null;
+        return getLogger(clazz.getName());
+    }
+
+    public static Logger getLogger(String name) {
+        assert name != null;
+        return LoggerFactory.getLogger("FOO."+ name);
+    }
+
+    public static boolean isServerInfoRequst(HttpServerRequest request) {
+        return isServerInfoRequst(request.uri());
+    }
+
+    private static boolean isServerInfoRequst(String uri) {
+        assert uri != null;
+        assert uri.startsWith("/");
+        try{
+            if( "/houston/server/info".equals(uri) ){
+                //log.trace("true <- isServerInfoRequst({})", uri);
+                return true;
+            }
+            //log.trace("false <- isServerInfoRequst({})", uri);
+            return false;
+        }catch(Throwable ex){
+            assert false;
+            throw ex;
+        }
+    }
+
+    public static void onBeforeEvBusAdapterDataHandler(String uri) {
+        if( !isServerInfoRequst(uri) ) return;
+        log.trace("onBeforeEvBusAdapterDataHandler({})", uri);
+        assert false;
+    }
+
+    public static void onBeforeEvBusAdapterEndHandler(String uri) {
+        if( !isServerInfoRequst(uri)) return;
+        log.trace("onBeforeEvBusAdapterEndHandler({})", uri);
+        assert false;
+    }
+
+    public static void onEvBusAdapterHandle(io.vertx.core.http.impl.HttpServerRequestInternal req) {
+        if( !isServerInfoRequst(req.uri()) ) return;
+        assert !assertRequestEquality || serverInfoRequest != req;
+        assert restStorageEvBusAdaptMappdHttpServReq == null;
+        log.trace("onEvBusAdapterHandle({})", req.uri());
+        restStorageEvBusAdaptMappdHttpServReq = req;
+    }
+
+    public static void onEvBusAdapterError(Throwable ex) {
+        log.error("onEvBusAdapterError()", new Exception("stacktrace", ex));
+    }
+
+    public static void onRestStorageHandlerHandle(HttpServerRequest req) {
+        if( !isServerInfoRequst(req) ) return;
+        log.trace("onRestStorageHandlerHandle({})", req.uri());
+        assert !assertRequestEquality || serverInfoRequest == req;
+    }
+
+    public static void onRestStorageHandler_getResource(io.vertx.ext.web.RoutingContext ctx) {
+        if( !isServerInfoRequst(ctx.request()) ) return;
+        assert !assertRequestEquality || serverInfoRequest == ctx.request();
+        log.trace("onRestStorageHandler_getResource({})", ctx.request().uri());
+    }
+
+    public static void onRestStorageHandler_getResource_before_storage_get(String path, int offset, int limit) {
+        //log.trace("onRestStorageHandler_getResource_before_storage_get({}, {}, {})", path, offset, limit);
+    }
+
+    public static void onRestStorageHandler_getResource_after_storage_get(String path, int offset, int limit, Object/*org.swisspush.reststorage.Resource*/ resource) {
+        //log.trace("onRestStorageHandler_getResource_after_storage_get({})", path);
+    }
+
+    public static void onGetHoustonServerInfo(RoutingContext ctx) {
+        var req = ctx.request();
+        log.trace("onGetHoustonServerInfo({})", req.uri());
+        assert !assertRequestEquality || serverInfoRequest != req;
+    }
+
+    public static void onEndCompleted(long responseBegEpochMs){
+        long durationMs = System.currentTimeMillis() - responseBegEpochMs;
+        log.debug("Request took {}ms", durationMs);
+    }
+
+}
+
--
cgit v1.1

From 24597193f73e11043aa04d1cdc0f59f6317865b0 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Tue, 23 Jan 2024 16:52:29 +0100
Subject: Add doc about msys broken paths. Move MshitTeams doc.
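
Background, as far as I understand it: Git-Bash (msys) tries to be
helpful and rewrites every argument that looks like a unix path into a
windoof path before the called program ever sees it. For tools that
expect the literal string this silently produces garbage. Setting
MSYS_NO_PATHCONV=1 disables that rewriting for a single invocation. A
minimal sketch (the docker call is only an assumed example, any command
taking path-like args qualifies):

    # Without the variable, msys may rewrite "/c/work" behind our back.
    MSYS_NO_PATHCONV=1 docker run --rm -v "/c/work:/work" alpine ls /work
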
--- doc/note/msteams/msteams.txt | 30 ------------------------------ doc/note/windoof/msteams.txt | 30 ++++++++++++++++++++++++++++++ doc/note/windoof/msys-path-issue.txt | 3 +++ 3 files changed, 33 insertions(+), 30 deletions(-) delete mode 100644 doc/note/msteams/msteams.txt create mode 100644 doc/note/windoof/msteams.txt create mode 100644 doc/note/windoof/msys-path-issue.txt diff --git a/doc/note/msteams/msteams.txt b/doc/note/msteams/msteams.txt deleted file mode 100644 index 216c9ce..0000000 --- a/doc/note/msteams/msteams.txt +++ /dev/null @@ -1,30 +0,0 @@ - -Mdoof Teams -=========== - -## Notify the-other-way-around - -Stop using products the wrong way around please! An instant messenger is NOT -the same as an E-Mail box. - ------------------------------------------------------------------------- -https://outlook.office.com/mail -https://alternativeto.net/software/microsoft-office-outlook/?license=opensource -https://www.gnu.org/proprietary/malware-microsoft.html ------------------------------------------------------------------------- -Messages updated. Microsoft Teams - -Hi, ___ -Your teammates are trying to reach you via E-Mail. -Andreas sent a message via E-Mail -___ -See More - -Install whatever E-Mail client you wish without any shiny vendor lockins. - -iOS Icon - Android Icon -This message was sent from an unmonitored teams chat. -© 2023 Microsoft Corporation, One Microsoft Way, Redmond WA 98052-7329 -Read our privacy policy ------------------------------------------------------------------------- - diff --git a/doc/note/windoof/msteams.txt b/doc/note/windoof/msteams.txt new file mode 100644 index 0000000..216c9ce --- /dev/null +++ b/doc/note/windoof/msteams.txt @@ -0,0 +1,30 @@ + +Mdoof Teams +=========== + +## Notify the-other-way-around + +Stop using products the wrong way around please! An instant messenger is NOT +the same as an E-Mail box. + +------------------------------------------------------------------------ +https://outlook.office.com/mail +https://alternativeto.net/software/microsoft-office-outlook/?license=opensource +https://www.gnu.org/proprietary/malware-microsoft.html +------------------------------------------------------------------------ +Messages updated. Microsoft Teams + +Hi, ___ +Your teammates are trying to reach you via E-Mail. +Andreas sent a message via E-Mail +___ +See More + +Install whatever E-Mail client you wish without any shiny vendor lockins. + +iOS Icon - Android Icon +This message was sent from an unmonitored teams chat. 
+© 2023 Microsoft Corporation, One Microsoft Way, Redmond WA 98052-7329 +Read our privacy policy +------------------------------------------------------------------------ + diff --git a/doc/note/windoof/msys-path-issue.txt b/doc/note/windoof/msys-path-issue.txt new file mode 100644 index 0000000..0b49323 --- /dev/null +++ b/doc/note/windoof/msys-path-issue.txt @@ -0,0 +1,3 @@ + +MSYS_NO_PATHCONV=1 + -- cgit v1.1 From 4a9cd4ad05b6a4c726abb5cfe1997ac40c8f3948 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 24 Jan 2024 12:01:14 +0100 Subject: No idea why wireshark suddenly refuses to start due to this name --- src/main/lua/wireshark/HttpTime.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/lua/wireshark/HttpTime.lua b/src/main/lua/wireshark/HttpTime.lua index b06c0a7..514c62b 100644 --- a/src/main/lua/wireshark/HttpTime.lua +++ b/src/main/lua/wireshark/HttpTime.lua @@ -10,7 +10,7 @@ local mod = {} function mod.init() local that = mod.seal{ - proto = Proto("__", "Additional Metadata"), + proto = Proto("AdditMeta", "Additional Metadata"), f_andy_httpTime = ProtoField.float("_.httpTime", "HttpTime"), f_andy_synSeen = ProtoField.bool("_.synSeen", "SynSeen"), f_andy_uri = ProtoField.string("_.uri", "Request URI"), -- cgit v1.1 From e068e9e4802fcd3adc453124846e1251830b0f74 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 25 Jan 2024 14:20:16 +0100 Subject: Some OpenSh*t debugging. --- doc/note/curl/curl.txt | 4 +- doc/note/links/links.txt | 3 +- doc/note/openshift/openshift.txt | 12 ++++- doc/note/tcpdump/tcpdump.txt | 14 ++++++ src/main/lua/pcap/KubeProbeFilter.lua | 93 +++++++++++++++++++++++++++++++++++ 5 files changed, 120 insertions(+), 6 deletions(-) create mode 100644 src/main/lua/pcap/KubeProbeFilter.lua diff --git a/doc/note/curl/curl.txt b/doc/note/curl/curl.txt index b5a3556..fe0302b 100644 --- a/doc/note/curl/curl.txt +++ b/doc/note/curl/curl.txt @@ -2,9 +2,7 @@ ## Timing - curl example.com -w "\n\nconnect=%{time_connect}s, trsf=%{time_starttransfer}s, totl=%{time_total}s" - - + curl example.com -w "\n\nconnect=%{time_connect}s, trsf=%{time_starttransfer}s, totl=%{time_total}s\n" ## Sources diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 8f7462c..08ecbe4 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -239,7 +239,7 @@ Links (Aka argument amplifiers) ## Performance DOES matter - "https://github.com/swisspush/gateleen/pull/456#discussion_r844865066" - [Performance Excuses Debunked](https://m.youtube.com/watch?v=x2EOOJg8FkA) -- [Frameworks & clusters do not solve it](https://www.youtube.com/watch?v=EpYr3T5VP6w&t=1109) +- [Is writing performant code too expensive?](https://www.youtube.com/watch?v=EpYr3T5VP6w&t=1109) - [Simple Code, High Performance](https://m.youtube.com/watch?v=Ge3aKEmZcqY&t=78) - [Houston Last führt zu Neustart](https://wikit.post.ch/x/HDV8T) - [Houston storage request timed out large json](https://jira.post.ch/browse/SDCISA-11294) @@ -252,6 +252,7 @@ Links (Aka argument amplifiers) - [2023-10-27 OOM nun auch auf Eagle](https://wikit.post.ch/x/c2U1Tw) - [Fahrplanimports slow](https://jira.post.ch/browse/SDCISA-11528) - [Jenkinsbuild too slow](https://jira.post.ch/browse/SDCISA-14313?focusedId=1914236&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1914236) +- [Houston check too 
slow](https://jira.post.ch/browse/SDCISA-13746?focusedId=1937167&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1937167) ## Performance is not an issue ... - [Houston OOM 2023-06-27](https://wikit.post.ch/x/_Bv6Rw) diff --git a/doc/note/openshift/openshift.txt b/doc/note/openshift/openshift.txt index e807da3..48fda8f 100644 --- a/doc/note/openshift/openshift.txt +++ b/doc/note/openshift/openshift.txt @@ -79,8 +79,16 @@ HINT: ALL files from Current dir (.) will get uploaded (when global rsync not av oc scale dc/preflux --replicas=1 -## TODO what was this for? - oc get pvc + +## Kube Probe + + echo && ocprod exec -ti "$(ocprod get pods|egrep ston-[0-9]|cut -f1 -d' ')" -- sh -c 'true \ + && printf "c; When ; rsp_code; time_connect; time_redirect; time_starttransfer; time_total\n" \ + && while true; do true \ + && now=$(date -uIs) \ + && curl -sS -o/dev/null -w "r; $(date -uIs); %{response_code}; %{time_connect}s; %{time_redirect}s; %{time_starttransfer}s; %{time_total}s\n" 127.0.0.1:7012/houston/server/info \ + && sleep 5 || break \ + ;done' | tee -a C:/work/tmp/houston-prod-inPod-probe.log diff --git a/doc/note/tcpdump/tcpdump.txt b/doc/note/tcpdump/tcpdump.txt index 7df4335..b7bdd6d 100644 --- a/doc/note/tcpdump/tcpdump.txt +++ b/doc/note/tcpdump/tcpdump.txt @@ -62,6 +62,15 @@ Tips: "https://chrissanders.org/2018/06/large-captures4-filter-whittling/" "net 172.18.0.0/16" +## Filter kube-probes "GET /houston/server/info" or '{"name":"houston",' + + tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 8M -W 99 -G 600 "(tcp[((tcp[12:1]&0xf0)>>2)+0:4] = 0x47455420 && tcp[((tcp[12:1]&0xf0)>>2)+4:4] = 0x2F686F75 && tcp[((tcp[12:1]&0xf0)>>2)+8:4] = 0x73746F6E && tcp[((tcp[12:1]&0xf0)>>2)+12:4] = 0x2F736572 && tcp[((tcp[12:1]&0xf0)>>2)+16:4] = 0x7665722F && tcp[((tcp[12:1]&0xf0)>>2)+20:4] = 0x696E666F && tcp[((tcp[12:1]&0xf0)>>2)+24:1] = 0x20) or (tcp[((tcp[12:1]&0xf0)>>2)+115:4] = 0x7B226E61 && tcp[((tcp[12:1]&0xf0)>>2)+119:4] = 0x6D65223A && tcp[((tcp[12:1]&0xf0)>>2)+123:4] = 0x22686F75 && tcp[((tcp[12:1]&0xf0)>>2)+127:4] = 0x73746F6E && tcp[((tcp[12:1]&0xf0)>>2)+131:2] = 0x222C)" + + +## Try dump kube-probes fully + + tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 8M -W 99 -G 600 "host 10.127.77.1 and port 7012" + ## Extract hosts file from DNS traffic @@ -71,3 +80,8 @@ Not perfect because needs manual fine-tuning. But can be helpful anyway. Vielleicht auch mal option "-zhosts" ausprobieren. Sollte auch sowas tun. + +[man tcpdump](https://www.tcpdump.org/manpages/tcpdump.1.html) +[no name port numbers rhel patch](https://superuser.com/a/587304/1123359) +[complex filter by byte contents](https://security.stackexchange.com/a/121013/179017) + diff --git a/src/main/lua/pcap/KubeProbeFilter.lua b/src/main/lua/pcap/KubeProbeFilter.lua new file mode 100644 index 0000000..a5967e9 --- /dev/null +++ b/src/main/lua/pcap/KubeProbeFilter.lua @@ -0,0 +1,93 @@ +-- +-- Try to extract kube-probe related requests. 
+-- + +local newPcapParser = assert(require("pcapit").newPcapParser) +local newPcapDumper = assert(require("pcapit").newPcapDumper) + +local out, log = io.stdout, io.stderr +local main, onPcapFrame, vapourizeUrlVariables + + +function onPcapFrame( app, it ) + local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort() + local userAgent, reqUri + -- + if dstPort ~= 7012 and srcPort ~= 7012 then return end + local trspPayload = it:trspPayload() + local httpReqLinePart1, httpReqLinePart2, httpReqLinePart3 = + trspPayload:match("^([A-Z/1.0]+) ([^ ]+) ([^ \r\n]+)\r?\n") + if httpReqLinePart1 and not httpReqLinePart1:find("^HTTP/1.%d$") then -- assume HTTP request + reqUri = httpReqLinePart2 + userAgent = trspPayload:match("\n[Uu][Ss][Ee][Rr]%-[Aa][Gg][Ee][Nn][Tt]:%s+([^\r\n]+)\r?\n"); + if userAgent then + --if not userAgent:find("^kube%-probe/") then return end -- assume halfrunt + --log:write("User-Agent: ".. userAgent .."\n") + end + elseif httpReqLinePart1 then -- assume HTTP response + --out:write(trspPayload) + end + local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr() + local connKey = ((srcPort < dstPort)and(srcPort.."\0"..dstPort)or(dstPort.."\0"..srcPort)) + .."\0"..((srcIp < dstIp)and(srcIp.."\0"..dstIp)or(dstIp.."\0"..srcIp)) + local conn = app.connections[connKey] + if not conn then conn = {isOfInterest=false, pkgs={}} app.connections[connKey] = conn end + conn.isOfInterest = (conn.isOfInterest or reqUri == "/houston/server/info") + if not conn.isOfInterest then + if #conn.pkgs > 3 then -- Throw away all stuff except TCP handshake + conn.pkgs = { conn.pkgs[1], conn.pkgs[2], conn.pkgs[3] } + end + local sec, usec = it:frameArrivalTime() + --for k,v in pairs(getmetatable(it))do print("E",k,v)end + local pkg = { + sec = assert(sec), usec = assert(usec), + caplen = it:frameCaplen(), len = it:frameLen(), + tcpFlags = (conn.isOfInterest)and(it:tcpFlags())or false, + srcPort = srcPort, dstPort = dstPort, + trspPayload = trspPayload, + rawFrame = it:rawFrame(), + } + table.insert(conn.pkgs, pkg) + else + -- Stop memory hogging. Write that stuff to output + if #conn.pkgs > 0 then + for _, pkg in ipairs(conn.pkgs) do + --out:write(string.format("-- PKG 1 %d->%d %d.%09d tcpFlg=0x%04X\n", pkg.srcPort, pkg.dstPort, pkg.sec, pkg.usec, pkg.tcpFlags or 0)) + --out:write(pkg.trspPayload) + --out:write("\n") + app.dumper:dump(pkg.sec, pkg.usec, pkg.caplen, pkg.len, pkg.rawFrame, 1, pkg.rawFrame:len()) + end + conn.pkgs = {} + end + local tcpFlags = it:tcpFlags() + local sec, usec = it:frameArrivalTime() + local rawFrame = it:rawFrame() + --out:write(string.format("-- PKG 2 %d->%d %d.%09d tcpFlg=0x%04X, len=%d\n", srcPort, dstPort, sec, usec, tcpFlags or 0, trspPayload:len())) + --out:write(trspPayload) + --if trspPayload:byte(trspPayload:len()) ~= 0x0A then out:write("\n") end + --out:write("\n") + app.dumper:dump(sec, usec, it:frameCaplen(), it:frameLen(), rawFrame, 1, rawFrame:len()) + end +end + + +function main() + local app = { + parser = false, + dumper = false, + connections = {}, + } + app.parser = newPcapParser{ + dumpFilePath = "-", + onFrame = function(f)onPcapFrame(app, f)end, + } + app.dumper = newPcapDumper{ + dumpFilePath = "C:/work/tmp/KubeProbeFilter.out.pcap", + } + app.parser:resume() +end + + +main() + + -- cgit v1.1 From 019e57b637506a2e546c9eff71d8641af7027afb Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 25 Jan 2024 17:10:36 +0100 Subject: Update tcpdump from error hunting. 
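
Note how the capture flags used here interact (per the tcpdump manpage):
"-G 600" rotates to a new file every 600 seconds, "-C" rotates
additionally once a file grows past the given size, and "-W" caps the
file count (combined with -C it overwrites round-robin like a ring
buffer, combined with -G it makes tcpdump exit when the limit is
reached). The "timeout --foreground 900" wrapper is just a dead-man
switch so a forgotten capture cannot fill the disk. To look at the
rotated files as one trace, something like this should do (assumes the
wireshark CLI tools are installed; the glob depends on the "-w" pattern
used above):

    mergecap -w houston-merged.pcap /tmp/houston-*-tcp-*.pcap*
    capinfos houston-merged.pcap   # sanity check: duration, packet count
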
--- doc/note/links/links.txt | 3 +++ doc/note/tcpdump/tcpdump.txt | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 08ecbe4..d187788 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -573,3 +573,6 @@ Links (Aka argument amplifiers) [About TLS in isa](https://jira.post.ch/browse/SDCISA-14330?focusedId=1925001&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925001) +## Tools like tcpdump are incredibly important +- [tcpdump discovers the truth once more](https://jira.post.ch/browse/SDCISA-13746?focusedId=1939377&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1939377) + diff --git a/doc/note/tcpdump/tcpdump.txt b/doc/note/tcpdump/tcpdump.txt index b7bdd6d..71ffb02 100644 --- a/doc/note/tcpdump/tcpdump.txt +++ b/doc/note/tcpdump/tcpdump.txt @@ -69,7 +69,7 @@ Tips: "https://chrissanders.org/2018/06/large-captures4-filter-whittling/" ## Try dump kube-probes fully - tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 8M -W 99 -G 600 "host 10.127.77.1 and port 7012" + timeout --foreground 900 tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 42M -W 42 -G 600 "host 10.127.73.1 and port 7012" ## Extract hosts file from DNS traffic -- cgit v1.1 From 545ab4d3eade7c413345ab39e129f2d342d70203 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 26 Jan 2024 17:25:37 +0100 Subject: Migrate bashrc and inputrc from dotfiles to here. --- doc/note/bash/bashrc | 53 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/note/bash/inputrc | 4 ++++ 2 files changed, 57 insertions(+) create mode 100644 doc/note/bash/bashrc create mode 100644 doc/note/bash/inputrc diff --git a/doc/note/bash/bashrc b/doc/note/bash/bashrc new file mode 100644 index 0000000..ca3aaa2 --- /dev/null +++ b/doc/note/bash/bashrc @@ -0,0 +1,53 @@ + +WINDOOF=$(if [ -d /c/Windows ]; then echo true; else echo false; fi) + +# Disable annoying "features", so that exclamation marks become usable again. +set +o histexpand + +# Do NOT store duplicates in history. Do NOT store in history if +# starts-with-space. +HISTCONTROL=ignoreboth + +if [ $SHLVL -eq 1 ]; then + set -o ignoreeof # Require explicit 'exit' cmd to exit shell. +else + set +o ignoreeof +fi + +export PS1='\033[1;32m[\033[0m$? \033[1;30m\u\033[0m\033[1;32m@\033[1;30m\h \033[1;34m\w\033[1;32m]\033[0m\n\$ ' + +# Add global node modules to path +#PATH=/opt/node-6.10.1/lib/node_modules/.bin:$PATH +# bash completion for npm +#source /opt/node-6.10.1/etc/npm-completion.sh + +if test -d ~/.local/bin; then export PATH=~/.local/bin:$PATH; fi + +############################################################################### +# +# Auto-launching ssh-agent on Git for Windoofs +# (See: https://docs.github.com/en/github/authenticating-to-github/working-with-ssh-key-passphrases#auto-launching-ssh-agent-on-git-for-windows) +# +if $WINDOOF; then + env=~/.ssh/agent.env + + agent_load_env () { test -f "$env" && . "$env" >| /dev/null ; } + agent_start () { (umask 077; ssh-agent >| "$env"); . "$env" >| /dev/null ; } + + agent_load_env + + # agent_run_state: 0=agent running w/ key; 1=agent w/o key; 2= agent not running + agent_run_state=$(ssh-add -l >| /dev/null 2>&1; echo $?) + + if [ ! 
"$SSH_AUTH_SOCK" ] || [ $agent_run_state = 2 ]; then + agent_start + #ssh-add + #elif [ "$SSH_AUTH_SOCK" ] && [ $agent_run_state = 1 ]; then + # ssh-add + fi + + unset env +fi +# +############################################################################### + diff --git a/doc/note/bash/inputrc b/doc/note/bash/inputrc new file mode 100644 index 0000000..df82709 --- /dev/null +++ b/doc/note/bash/inputrc @@ -0,0 +1,4 @@ + +set colored-completion-prefix on +set colored-stats off + -- cgit v1.1 From d796b639498730f96f4f7f221ad39b899af61b6c Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sat, 27 Jan 2024 23:17:43 +0100 Subject: Tinker with nginx and wondoof in qemu. --- doc/note/nginx/nginx-wdoof.txt | 1 + doc/note/nginx/nginx.txt | 36 +++++++++++++++++++++++++++++++++++ doc/note/qemu/qemu.txt | 2 +- doc/note/qemu/setup-windoof.txt | 24 +++++++++++++++++++++++ doc/note/windoof/kill-auto-update.txt | 6 +++--- 5 files changed, 65 insertions(+), 4 deletions(-) create mode 100644 doc/note/qemu/setup-windoof.txt diff --git a/doc/note/nginx/nginx-wdoof.txt b/doc/note/nginx/nginx-wdoof.txt index 2bf7a52..cc9c23d 100644 --- a/doc/note/nginx/nginx-wdoof.txt +++ b/doc/note/nginx/nginx-wdoof.txt @@ -23,6 +23,7 @@ http { sendfile on; keepalive_timeout 65; server { + # For public access use "8080" and "[::]:8080" listen 127.0.0.1:8080; server_name localhost; location / { diff --git a/doc/note/nginx/nginx.txt b/doc/note/nginx/nginx.txt index 2a15ae5..d5ce9db 100644 --- a/doc/note/nginx/nginx.txt +++ b/doc/note/nginx/nginx.txt @@ -1,4 +1,40 @@ +## Basic nginx config + +[looks promising](https://stackoverflow.com/a/73297125/4415884) + + #daemon off; # run in foreground (eg from cli) + events {} + pid nginx.pid; + http { + access_log /dev/stdout; + # Directories nginx needs configured to start up. + client_body_temp_path .; + proxy_temp_path .; + fastcgi_temp_path .; + uwsgi_temp_path .; + scgi_temp_path .; + server { + # For public access use "80" and "[::]:80" + listen 127.0.0.1:80; + listen [::1]:80; + server_name localhost; + root /srv/www + location /foo { + try_files $uri $uri/ =404; + } + location /example { + autoindex on; # enable directory listing + return 200 "Example says hi"; + } + } + + } + +[tutorial](https://www.javatpoint.com/nginx-minimal-configuration) + + + ## fCGI keep alive backend connections upstream myFancyBackend { diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 7e90598..b7d9f89 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -43,7 +43,7 @@ qemu-system-x86_64 \ -cdrom "path/to/cd.iso" \ -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \ `# Isolated Network plus host port/cmd reachable from guest` \ - -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \ + -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \ -device e1000,netdev=n1 \ `# 10.0.2.x network with host redirect` \ -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ diff --git a/doc/note/qemu/setup-windoof.txt b/doc/note/qemu/setup-windoof.txt new file mode 100644 index 0000000..f0be243 --- /dev/null +++ b/doc/note/qemu/setup-windoof.txt @@ -0,0 +1,24 @@ + +Setup Windoof in a experiment VM +================================ + +Install system STRICTLY WITHOUT internet connection. + +Prepare "../windoof/kill-auto-update.txt" on webserver via restricted net. Make +sure to use windoof newlines (CRLF). 
+ +curl -sSD- http://10.0.2.9:80/kill-auto-update.reg -O + +Run "reg" file to disable annoying updated. After that, we can reboot windoof +with internet connection. + +Install: +- "https://notepad-plus-plus.org/" + +Install maybe: +- "https://www.mozilla.org/en-US/firefox/windows/" + +Manually trigger updates, reboot, updates, reboot, ... (likely some more turns). + +SDelete.exe -nobanner -z C: + diff --git a/doc/note/windoof/kill-auto-update.txt b/doc/note/windoof/kill-auto-update.txt index 3ccb20a..d6b896f 100644 --- a/doc/note/windoof/kill-auto-update.txt +++ b/doc/note/windoof/kill-auto-update.txt @@ -11,8 +11,8 @@ unneccesarily tedious. ## Stop annoying updates -Windows Registry Editor Version 5.00 -[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU] -"NoAutoUpdate"=dword:00000001 +Windows Registry Editor Version 5.00 +[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU] +"NoAutoUpdate"=dword:00000001 -- cgit v1.1 From b8805759f661f9561162f07af6e3ef08590cde21 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Sat, 27 Jan 2024 23:35:38 +0100 Subject: qemu windoof setup cleanup --- doc/note/qemu/qemu.txt | 2 +- doc/note/qemu/setup-windoof.txt | 33 +++++++++++++++++++++++---------- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index b7d9f89..b267698 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -24,7 +24,7 @@ Qemu ### Create Standalone image based on snapshot image qemu-img convert -O qcow2 derived.qcow2 standalone.qcow2 -## Shrink img +## Shrink/compact img Normal systems: qemu-img convert -O qcow2 input.qcow2 output.qcow2 diff --git a/doc/note/qemu/setup-windoof.txt b/doc/note/qemu/setup-windoof.txt index f0be243..c301416 100644 --- a/doc/note/qemu/setup-windoof.txt +++ b/doc/note/qemu/setup-windoof.txt @@ -4,21 +4,34 @@ Setup Windoof in a experiment VM Install system STRICTLY WITHOUT internet connection. -Prepare "../windoof/kill-auto-update.txt" on webserver via restricted net. Make -sure to use windoof newlines (CRLF). +Stop annoying windoof auto updates. Make sure to use windoof newlines (CRLF) in +the reg file: -curl -sSD- http://10.0.2.9:80/kill-auto-update.reg -O -Run "reg" file to disable annoying updated. After that, we can reboot windoof -with internet connection. +Windows Registry Editor Version 5.00 +[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU] +"NoAutoUpdate"=dword:00000001 -Install: +curl -sSD- http://10.0.2.9:80/stop-annoying-updates.reg -O + +After running "reg" file, we can reboot windoof with internet connection. + +Download/Install: +- "https://learn.microsoft.com/en-us/sysinternals/downloads/sdelete" - "https://notepad-plus-plus.org/" -Install maybe: -- "https://www.mozilla.org/en-US/firefox/windows/" +Install needed software (Maybe: firefox, MsOffice, MsTeams, ..?). + +Manually trigger updates, reboot, updates, reboot, (likely some more turns ...) + +Make sure no more updates are running. Then, I guess best is to reboot without +internet access once more to cleanup the disk: + + SDelete.exe -nobanner -z C: + +Shutdown guest, then compact disc. -Manually trigger updates, reboot, updates, reboot, ... (likely some more turns). + qemu-img convert -O qcow2 input.qcow output.qcow2 -SDelete.exe -nobanner -z C: +Image is ready. 
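+
+Why this works: the SDelete "-z" pass overwrites the guest's free space
+with zeros, and "qemu-img convert" then simply does not allocate those
+all-zero clusters when writing the fresh qcow2. Host-side sketch (file
+names are placeholders; "-c" additionally compresses clusters, trading
+CPU time for a smaller file):
+
+    qemu-img convert -O qcow2 -c hda.qcow2 hda-compact.qcow2
+    qemu-img info hda-compact.qcow2   # compare "disk size" before/after
+    mv hda-compact.qcow2 hda.qcow2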
-- cgit v1.1 From 8d0b4059babed7c00ea3f4cfec6a3848379a5fc3 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 30 Jan 2024 17:51:38 +0100 Subject: Update gateleen-kludge --- .../DelegateVertxHttpServerResponse.java | 166 +++++++++++---------- .../gateleenKludge/tmoutissue20240123/Foo.java | 8 +- 2 files changed, 91 insertions(+), 83 deletions(-) diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java index a13a8e2..87ce5a9 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java @@ -11,95 +11,101 @@ import io.vertx.core.http.HttpMethod; import io.vertx.core.http.HttpServerResponse; import io.vertx.core.http.StreamPriority; import io.vertx.core.streams.ReadStream; +import org.slf4j.Logger; import java.util.Set; +import static org.slf4j.LoggerFactory.getLogger; + public class DelegateVertxHttpServerResponse implements HttpServerResponse { + private static final Logger log = getLogger(DelegateVertxHttpServerResponse.class); private final HttpServerResponse delegate; + private final String dbgHint; - public DelegateVertxHttpServerResponse(HttpServerResponse delegate) { + public DelegateVertxHttpServerResponse(String debugHint, HttpServerResponse delegate) { + this.dbgHint = debugHint; this.delegate = delegate; } - @Override public HttpServerResponse exceptionHandler(Handler handler) { return delegate.exceptionHandler(handler); } - @Override public HttpServerResponse setWriteQueueMaxSize(int maxSize) { return delegate.setWriteQueueMaxSize(maxSize); } - @Override public HttpServerResponse drainHandler(Handler handler) { return delegate.drainHandler(handler); } - @Override public int getStatusCode() { return delegate.getStatusCode(); } - @Override public HttpServerResponse setStatusCode(int statusCode) { return delegate.setStatusCode(statusCode); } - @Override public String getStatusMessage() { return delegate.getStatusMessage(); } - @Override public HttpServerResponse setStatusMessage(String statusMessage) { return delegate.setStatusMessage(statusMessage); } - @Override public HttpServerResponse setChunked(boolean chunked) { return delegate.setChunked(chunked); } - @Override public boolean isChunked() { return delegate.isChunked(); } - @Override public MultiMap headers() { return delegate.headers(); } - @Override public HttpServerResponse putHeader(String name, String value) { return delegate.putHeader(name, value); } - @Override public HttpServerResponse putHeader(CharSequence name, CharSequence value) { return delegate.putHeader(name, value); } - @Override public HttpServerResponse putHeader(String name, Iterable values) { return delegate.putHeader(name, values); } - @Override public HttpServerResponse putHeader(CharSequence name, Iterable values) { return delegate.putHeader(name, values); } - @Override public MultiMap trailers() { return delegate.trailers(); } - @Override public HttpServerResponse putTrailer(String name, String value) { return delegate.putTrailer(name, value); } - @Override public HttpServerResponse putTrailer(CharSequence name, CharSequence value) { return delegate.putTrailer(name, value); } - @Override public HttpServerResponse putTrailer(String name, Iterable 
values) { return delegate.putTrailer(name, values); } - @Override public HttpServerResponse putTrailer(CharSequence name, Iterable value) { return delegate.putTrailer(name, value); } - @Override public HttpServerResponse closeHandler(Handler handler) { return delegate.closeHandler(handler); } - @Override public HttpServerResponse endHandler(Handler handler) { return delegate.endHandler(handler); } - @Override public Future write(String chunk, String enc) { return delegate.write(chunk, enc); } - @Override public void write(String chunk, String enc, Handler> handler) { delegate.write(chunk, enc, handler); } - @Override public Future write(String chunk) { return delegate.write(chunk); } - @Override public void write(String chunk, Handler> handler) { delegate.write(chunk, handler); } - @Override public HttpServerResponse writeContinue() { return delegate.writeContinue(); } - @Override public Future end(String chunk) { return delegate.end(chunk); } - @Override public void end(String chunk, Handler> handler) { delegate.end(chunk, handler); } - @Override public Future end(String chunk, String enc) { return delegate.end(chunk, enc); } - @Override public void end(String chunk, String enc, Handler> handler) { delegate.end(chunk, enc, handler); } - @Override public Future end(Buffer chunk) { return delegate.end(chunk); } - @Override public void end(Buffer chunk, Handler> handler) { delegate.end(chunk, handler); } - @Override public Future end() { return delegate.end(); } - @Override public void send(Handler> handler) { delegate.send(handler); } - @Override public Future send() { return delegate.send(); } - @Override public void send(String body, Handler> handler) { delegate.send(body, handler); } - @Override public Future send(String body) { return delegate.send(body); } - @Override public void send(Buffer body, Handler> handler) { delegate.send(body, handler); } - @Override public Future send(Buffer body) { return delegate.send(body); } - @Override public void send(ReadStream body, Handler> handler) { delegate.send(body, handler); } - @Override public Future send(ReadStream body) { return delegate.send(body); } - @Override public Future sendFile(String filename) { return delegate.sendFile(filename); } - @Override public Future sendFile(String filename, long offset) { return delegate.sendFile(filename, offset); } - @Override public Future sendFile(String filename, long offset, long length) { return delegate.sendFile(filename, offset, length); } - @Override public HttpServerResponse sendFile(String filename, Handler> resultHandler) { return delegate.sendFile(filename, resultHandler); } - @Override public HttpServerResponse sendFile(String filename, long offset, Handler> resultHandler) { return delegate.sendFile(filename, offset, resultHandler); } - @Override public HttpServerResponse sendFile(String filename, long offset, long length, Handler> resultHandler) { return delegate.sendFile(filename, offset, length, resultHandler); } - @Override public void close() { delegate.close(); } - @Override public boolean ended() { return delegate.ended(); } - @Override public boolean closed() { return delegate.closed(); } - @Override public boolean headWritten() { return delegate.headWritten(); } - @Override public HttpServerResponse headersEndHandler(Handler handler) { return delegate.headersEndHandler(handler); } - @Override public HttpServerResponse bodyEndHandler(Handler handler) { return delegate.bodyEndHandler(handler); } - @Override public long bytesWritten() { return delegate.bytesWritten(); } - @Override 
public int streamId() { return delegate.streamId(); } - @Override public HttpServerResponse push(HttpMethod method, String host, String path, Handler> handler) { return delegate.push(method, host, path, handler); } - @Override public Future push(HttpMethod method, String host, String path) { return delegate.push(method, host, path); } - @Override public HttpServerResponse push(HttpMethod method, String path, MultiMap headers, Handler> handler) { return delegate.push(method, path, headers, handler); } - @Override public Future push(HttpMethod method, String path, MultiMap headers) { return delegate.push(method, path, headers); } - @Override public HttpServerResponse push(HttpMethod method, String path, Handler> handler) { return delegate.push(method, path, handler); } - @Override public Future push(HttpMethod method, String path) { return delegate.push(method, path); } - @Override public HttpServerResponse push(HttpMethod method, String host, String path, MultiMap headers, Handler> handler) { return delegate.push(method, host, path, headers, handler); } - @Override public Future push(HttpMethod method, String host, String path, MultiMap headers) { return delegate.push(method, host, path, headers); } - @Override public boolean reset() { return delegate.reset(); } - @Override public boolean reset(long code) { return delegate.reset(code); } - @Override public HttpServerResponse writeCustomFrame(int type, int flags, Buffer payload) { return delegate.writeCustomFrame(type, flags, payload); } - @Override public HttpServerResponse writeCustomFrame(HttpFrame frame) { return delegate.writeCustomFrame(frame); } - @Override public HttpServerResponse setStreamPriority(StreamPriority streamPriority) { return delegate.setStreamPriority(streamPriority); } - @Override public HttpServerResponse addCookie(Cookie cookie) { return delegate.addCookie(cookie); } - @Override public Cookie removeCookie(String name) { return delegate.removeCookie(name); } - @Override public Cookie removeCookie(String name, boolean invalidate) { return delegate.removeCookie(name, invalidate); } - @Override public Set removeCookies(String name) { return delegate.removeCookies(name); } - @Override public Set removeCookies(String name, boolean invalidate) { return delegate.removeCookies(name, invalidate); } - @Override public Cookie removeCookie(String name, String domain, String path) { return delegate.removeCookie(name, domain, path); } - @Override public Cookie removeCookie(String name, String domain, String path, boolean invalidate) { return delegate.removeCookie(name, domain, path, invalidate); } - @Override public Future write(Buffer data) { return delegate.write(data); } - @Override public void write(Buffer data, Handler> handler) { delegate.write(data, handler); } - @Override public void end(Handler> handler) { delegate.end(handler); } - @Override public boolean writeQueueFull() { return delegate.writeQueueFull(); } + @Override public HttpServerResponse exceptionHandler(Handler handler) { log.trace("{}: exceptionHandler()", dbgHint); return delegate.exceptionHandler(handler); } + @Override public HttpServerResponse setWriteQueueMaxSize(int maxSize) { log.trace("{}: setWriteQueueMaxSize()", dbgHint); return delegate.setWriteQueueMaxSize(maxSize); } + @Override public HttpServerResponse drainHandler(Handler handler) { log.trace("{}: drainHandler()", dbgHint); return delegate.drainHandler(handler); } + @Override public int getStatusCode() { log.trace("{}: getStatusCode()", dbgHint); return delegate.getStatusCode(); } + @Override 
public HttpServerResponse setStatusCode(int statusCode) { log.trace("{}: setStatusCode()", dbgHint); return delegate.setStatusCode(statusCode); } + @Override public String getStatusMessage() { log.trace("{}: getStatusMessage()", dbgHint); return delegate.getStatusMessage(); } + @Override public HttpServerResponse setStatusMessage(String statusMessage) { log.trace("{}: setStatusMessage()", dbgHint); return delegate.setStatusMessage(statusMessage); } + @Override public HttpServerResponse setChunked(boolean chunked) { log.trace("{}: setChunked()", dbgHint); return delegate.setChunked(chunked); } + @Override public boolean isChunked() { log.trace("{}: isChunked()", dbgHint); return delegate.isChunked(); } + @Override public MultiMap headers() { log.trace("{}: headers()", dbgHint); return delegate.headers(); } + @Override public HttpServerResponse putHeader(String name, String value) { log.trace("{}: putHeader(Str,Str)", dbgHint); return delegate.putHeader(name, value); } + @Override public HttpServerResponse putHeader(CharSequence name, CharSequence value) { log.trace("{}: putHeader(ChrSeq,ChrSeq)", dbgHint); return delegate.putHeader(name, value); } + @Override public HttpServerResponse putHeader(String name, Iterable values) { log.trace("{}: putHeader(Str,Iter)", dbgHint); return delegate.putHeader(name, values); } + @Override public HttpServerResponse putHeader(CharSequence name, Iterable values) { log.trace("{}: putHeader(ChrSeq,Iter)", dbgHint); return delegate.putHeader(name, values); } + @Override public MultiMap trailers() { log.trace("{}: trailers()", dbgHint); return delegate.trailers(); } + @Override public HttpServerResponse putTrailer(String name, String value) { log.trace("{}: putTrailer(Str,Str)", dbgHint); return delegate.putTrailer(name, value); } + @Override public HttpServerResponse putTrailer(CharSequence name, CharSequence value) { log.trace("{}: putTrailer(ChrSeq,ChrSeq)", dbgHint); return delegate.putTrailer(name, value); } + @Override public HttpServerResponse putTrailer(String name, Iterable values) { log.trace("{}: putTrailer(Str,Iter)", dbgHint); return delegate.putTrailer(name, values); } + @Override public HttpServerResponse putTrailer(CharSequence name, Iterable value) { log.trace("{}: putTrailer(ChrSeq,Iter)", dbgHint); return delegate.putTrailer(name, value); } + @Override public HttpServerResponse closeHandler(Handler handler) { log.trace("{}: closeHandler()", dbgHint); return delegate.closeHandler(handler); } + @Override public HttpServerResponse endHandler(Handler handler) { log.trace("{}: endHandler()", dbgHint); return delegate.endHandler(handler); } + @Override public Future write(String chunk, String enc) { log.trace("{}: write(Str,Str)", dbgHint); return delegate.write(chunk, enc); } + @Override public void write(String chunk, String enc, Handler> handler) { log.trace("{}: write(Str,Str,Hdlr)", dbgHint); delegate.write(chunk, enc, handler); } + @Override public Future write(String chunk) { log.trace("{}: write(Str)", dbgHint); return delegate.write(chunk); } + @Override public void write(String chunk, Handler> handler) { log.trace("{}: write(Str,Hdlr)", dbgHint); delegate.write(chunk, handler); } + @Override public HttpServerResponse writeContinue() { log.trace("{}: writeContinue()", dbgHint); return delegate.writeContinue(); } + @Override public Future end(String chunk) { log.trace("{}: end(Str)", dbgHint); return delegate.end(chunk); } + @Override public void end(String chunk, Handler> handler) { log.trace("{}: end(Str,Hdlr)", dbgHint); 
delegate.end(chunk, handler); } + @Override public Future end(String chunk, String enc) { log.trace("{}: end(Str,Str)", dbgHint); return delegate.end(chunk, enc); } + @Override public void end(String chunk, String enc, Handler> handler) { log.trace("{}: end(Str,Str,Hdlr)", dbgHint); delegate.end(chunk, enc, handler); } + @Override public Future end(Buffer chunk) { log.trace("{}: end(Buf)", dbgHint); return delegate.end(chunk); } + @Override public void end(Buffer chunk, Handler> handler) { log.trace("{}: end(Buf,Hdlr)", dbgHint); delegate.end(chunk, handler); } + @Override public Future end() { log.trace("{}: end(void)", dbgHint); return delegate.end(); } + @Override public void send(Handler> handler) { log.trace("{}: send(Hdlr)", dbgHint); delegate.send(handler); } + @Override public Future send() { log.trace("{}: send(void)", dbgHint); return delegate.send(); } + @Override public void send(String body, Handler> handler) { log.trace("{}: send(Str,Hdlr)", dbgHint); delegate.send(body, handler); } + @Override public Future send(String body) { log.trace("{}: send(Str)", dbgHint); return delegate.send(body); } + @Override public void send(Buffer body, Handler> handler) { log.trace("{}: send(Buf,Hdlr)", dbgHint); delegate.send(body, handler); } + @Override public Future send(Buffer body) { log.trace("{}: send(Buf)", dbgHint); return delegate.send(body); } + @Override public void send(ReadStream body, Handler> handler) { log.trace("{}: send(RdStr,Hdlr)", dbgHint); delegate.send(body, handler); } + @Override public Future send(ReadStream body) { log.trace("{}: send(RdStr)", dbgHint); return delegate.send(body); } + @Override public Future sendFile(String filename) { log.trace("{}: sendFile(Str)", dbgHint); return delegate.sendFile(filename); } + @Override public Future sendFile(String filename, long offset) { log.trace("{}: sendFile(Str,lng)", dbgHint); return delegate.sendFile(filename, offset); } + @Override public Future sendFile(String filename, long offset, long length) { log.trace("{}: sendFile(Str,lng,lng)", dbgHint); return delegate.sendFile(filename, offset, length); } + @Override public HttpServerResponse sendFile(String filename, Handler> resultHandler) { log.trace("{}: sendFile(Str,Hdlr)", dbgHint); return delegate.sendFile(filename, resultHandler); } + @Override public HttpServerResponse sendFile(String filename, long offset, Handler> resultHandler) { log.trace("{}: sendFile(Str,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, resultHandler); } + @Override public HttpServerResponse sendFile(String filename, long offset, long length, Handler> resultHandler) { log.trace("{}: sendFile(Str,lng,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, length, resultHandler); } + @Override public void close() { log.trace("{}: close()", dbgHint); delegate.close(); } + @Override public boolean ended() { log.trace("{}: ended()", dbgHint); return delegate.ended(); } + @Override public boolean closed() { log.trace("{}: closed()", dbgHint); return delegate.closed(); } + @Override public boolean headWritten() { log.trace("{}: headWritten()", dbgHint); return delegate.headWritten(); } + @Override public HttpServerResponse headersEndHandler(Handler handler) { log.trace("{}: headersEndHandler()", dbgHint); return delegate.headersEndHandler(handler); } + @Override public HttpServerResponse bodyEndHandler(Handler handler) { log.trace("{}: bodyEndHandler()", dbgHint); return delegate.bodyEndHandler(handler); } + @Override public long bytesWritten() { log.trace("{}: 
bytesWritten()", dbgHint); return delegate.bytesWritten(); } + @Override public int streamId() { log.trace("{}: streamId()", dbgHint); return delegate.streamId(); } + @Override public HttpServerResponse push(HttpMethod method, String host, String path, Handler> handler) { log.trace("{}: push(Mthd,Str,Str,Hdlr)", dbgHint); return delegate.push(method, host, path, handler); } + @Override public Future push(HttpMethod method, String host, String path) { log.trace("{}: push(Mthd,Str,Str)", dbgHint); return delegate.push(method, host, path); } + @Override public HttpServerResponse push(HttpMethod method, String path, MultiMap headers, Handler> handler) { log.trace("{}: push(Mthd,Str,Map,Hdlr)", dbgHint); return delegate.push(method, path, headers, handler); } + @Override public Future push(HttpMethod method, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Map)", dbgHint); return delegate.push(method, path, headers); } + @Override public HttpServerResponse push(HttpMethod method, String path, Handler> handler) { log.trace("{}: push(Mthd,Str,Hdlr)", dbgHint); return delegate.push(method, path, handler); } + @Override public Future push(HttpMethod method, String path) { log.trace("{}: push(Mthd,Str)", dbgHint); return delegate.push(method, path); } + @Override public HttpServerResponse push(HttpMethod method, String host, String path, MultiMap headers, Handler> handler) { log.trace("{}: push(Mthd,Str,Str,Map,Hdlr)", dbgHint); return delegate.push(method, host, path, headers, handler); } + @Override public Future push(HttpMethod method, String host, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Str,Map)", dbgHint); return delegate.push(method, host, path, headers); } + @Override public boolean reset() { log.trace("{}: reset(void)", dbgHint); return delegate.reset(); } + @Override public boolean reset(long code) { log.trace("{}: reset({})", dbgHint, code); return delegate.reset(code); } + @Override public HttpServerResponse writeCustomFrame(int type, int flags, Buffer payload) { log.trace("{}: writeCustomFrame({}, {}, Buf)", dbgHint, type, flags); return delegate.writeCustomFrame(type, flags, payload); } + @Override public HttpServerResponse writeCustomFrame(HttpFrame frame) { log.trace("{}: writeCustomFrame()", dbgHint); return delegate.writeCustomFrame(frame); } + @Override public HttpServerResponse setStreamPriority(StreamPriority streamPriority) { log.trace("{}: setStreamPriority()", dbgHint); return delegate.setStreamPriority(streamPriority); } + @Override public HttpServerResponse addCookie(Cookie cookie) { log.trace("{}: addCookie()", dbgHint); return delegate.addCookie(cookie); } + @Override public Cookie removeCookie(String name) { log.trace("{}: removeCookie({})", dbgHint, name); return delegate.removeCookie(name); } + @Override public Cookie removeCookie(String name, boolean invalidate) { log.trace("{}: removeCookie({}, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, invalidate); } + @Override public Set removeCookies(String name) { log.trace("{}: removeCookies({})", dbgHint, name); return delegate.removeCookies(name); } + @Override public Set removeCookies(String name, boolean invalidate) { log.trace("{}: removeCookies({}, {})", dbgHint, name, invalidate); return delegate.removeCookies(name, invalidate); } + @Override public Cookie removeCookie(String name, String domain, String path) { log.trace("{}: removeCookie({}, Str, Str)", dbgHint, name); return delegate.removeCookie(name, domain, path); } + @Override public Cookie 
removeCookie(String name, String domain, String path, boolean invalidate) { log.trace("{}: removeCookie({}, Str, Str, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, domain, path, invalidate); } + @Override public Future write(Buffer data) { log.trace("{}: write(Buf)", dbgHint); return delegate.write(data); } + @Override public void write(Buffer data, Handler> handler) { log.trace("{}: write(Buf, Hdlr)", dbgHint); delegate.write(data, handler); } + @Override public void end(Handler> handler) { log.trace("{}: end(Hdlr)", dbgHint); delegate.end(handler); } + @Override public boolean writeQueueFull() { log.trace("{}: writeQueueFull()", dbgHint); return delegate.writeQueueFull(); } } diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java index 4eb1e20..eadca9a 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java @@ -5,6 +5,8 @@ import io.vertx.ext.web.RoutingContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static java.lang.System.currentTimeMillis; + public class Foo { @@ -36,7 +38,7 @@ public class Foo { public static void onBeforeMainVerticleRouteGeneric(HttpServerRequest req) { if( !isServerInfoRequst(req) ) return; log.trace("onBeforeMainVerticleRouteGeneric()"); - onBeginRouteEpochMs = System.currentTimeMillis(); + onBeginRouteEpochMs = currentTimeMillis(); assert !assertRequestEquality || serverInfoRequest == req; } @@ -121,8 +123,8 @@ public class Foo { } public static void onEndCompleted(long responseBegEpochMs){ - long durationMs = System.currentTimeMillis() - responseBegEpochMs; - log.debug("Request took {}ms", durationMs); + long nowEpochMs = currentTimeMillis(); + log.debug("Request took {}ms and {}ms", nowEpochMs - onBeginRouteEpochMs, nowEpochMs - responseBegEpochMs); } } -- cgit v1.1 From 8029cdddc4a977bc67399a62e603d85b61456846 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 31 Jan 2024 17:10:11 +0100 Subject: Updte gateleen-kludge --- .../DelegateVertxHttpServerRequestInternal.java | 69 +++++- .../gateleenKludge/tmoutissue20240123/Foo.java | 2 +- .../HoustonInfoRequestTracer.java | 265 +++++++++++++++++++++ 3 files changed, 334 insertions(+), 2 deletions(-) create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java index 16c7259..92fe3fc 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java @@ -22,6 +22,7 @@ import io.vertx.core.net.NetSocket; import io.vertx.core.net.SocketAddress; import io.vertx.core.streams.Pipe; import io.vertx.core.streams.WriteStream; +import org.slf4j.Logger; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLSession; @@ -29,13 +30,19 @@ import javax.security.cert.X509Certificate; import java.util.Map; import java.util.Set; 
+import static org.slf4j.LoggerFactory.getLogger; + public class DelegateVertxHttpServerRequestInternal implements HttpServerRequestInternal { + private static final Logger log = getLogger(DelegateVertxHttpServerRequestInternal.class); private final HttpServerRequestInternal delegate; private final boolean isDebugging = true; + private final String dbgHint; - public DelegateVertxHttpServerRequestInternal(HttpServerRequest delegate) { + public DelegateVertxHttpServerRequestInternal(String debugHint, HttpServerRequest delegate) { + log.trace("{}: new DelegateVertxHttpServerRequestInternal()", debugHint); this.delegate = (HttpServerRequestInternal) delegate; + this.dbgHint = debugHint; } private void breakpoint(){ @@ -46,306 +53,357 @@ public class DelegateVertxHttpServerRequestInternal implements HttpServerRequest @Override public HttpServerRequest exceptionHandler(Handler handler) { + log.trace("{}: exceptionHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.exceptionHandler(handler); } @Override public HttpServerRequest handler(Handler handler) { + log.trace("{}: handler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.handler(handler); } @Override public HttpServerRequest pause() { + log.trace("{}: pause()", dbgHint); if( isDebugging ) breakpoint(); return delegate.pause(); } @Override public HttpServerRequest resume() { + log.trace("{}: resume()", dbgHint); if( isDebugging ) breakpoint(); return delegate.resume(); } @Override public HttpServerRequest fetch(long amount) { + log.trace("{}: fetch({})", dbgHint, amount); if( isDebugging ) breakpoint(); return delegate.fetch(amount); } @Override public HttpServerRequest endHandler(Handler endHandler) { + log.trace("{}: endHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.endHandler(endHandler); } @Override public HttpVersion version() { + log.trace("{}: version()", dbgHint); if( isDebugging ) breakpoint(); return delegate.version(); } @Override public HttpMethod method() { + log.trace("{}: method()", dbgHint); if( isDebugging ) breakpoint(); return delegate.method(); } @Override public boolean isSSL() { + log.trace("{}: isSSL()", dbgHint); if( isDebugging ) breakpoint(); return delegate.isSSL(); } @Override public String scheme() { + log.trace("{}: scheme()", dbgHint); if( isDebugging ) breakpoint(); return delegate.scheme(); } @Override public String uri() { + log.trace("{}: uri()", dbgHint); if( isDebugging ) breakpoint(); return delegate.uri(); } @Override public String path() { + log.trace("{}: path()", dbgHint); if( isDebugging ) breakpoint(); return delegate.path(); } @Override public String query() { + log.trace("{}: query()", dbgHint); if( isDebugging ) breakpoint(); return delegate.query(); } @Override public String host() { + log.trace("{}: host()", dbgHint); if( isDebugging ) breakpoint(); return delegate.host(); } @Override public long bytesRead() { + log.trace("{}: bytesRead()", dbgHint); if( isDebugging ) breakpoint(); return delegate.bytesRead(); } @Override public HttpServerResponse response() { + log.trace("{}: response()", dbgHint); if( isDebugging ) breakpoint(); return delegate.response(); } @Override public MultiMap headers() { + log.trace("{}: headers()", dbgHint); if( isDebugging ) breakpoint(); return delegate.headers(); } @Override public String getHeader(String headerName) { + log.trace("{}: getHeader(\"{}\")", dbgHint, headerName); if( isDebugging ) breakpoint(); return delegate.getHeader(headerName); } @Override public String getHeader(CharSequence 
headerName) { + log.trace("{}: getHeader(\"{}\")", dbgHint, headerName); if( isDebugging ) breakpoint(); return delegate.getHeader(headerName); } @Override public MultiMap params() { + log.trace("{}: params()", dbgHint); if( isDebugging ) breakpoint(); return delegate.params(); } @Override public String getParam(String paramName) { + log.trace("{}: getParam(\"{}\")", dbgHint, paramName); if( isDebugging ) breakpoint(); return delegate.getParam(paramName); } @Override public String getParam(String paramName, String defaultValue) { + log.trace("{}: getParam(\"{}\", \"{}\")", dbgHint, paramName, defaultValue); if( isDebugging ) breakpoint(); return delegate.getParam(paramName, defaultValue); } @Override public SocketAddress remoteAddress() { + log.trace("{}: remoteAddress()", dbgHint); if( isDebugging ) breakpoint(); return delegate.remoteAddress(); } @Override public SocketAddress localAddress() { + log.trace("{}: localAddress()", dbgHint); if( isDebugging ) breakpoint(); return delegate.localAddress(); } @Override public SSLSession sslSession() { + log.trace("{}: sslSession()", dbgHint); if( isDebugging ) breakpoint(); return delegate.sslSession(); } @Override public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException { + log.trace("{}: peerCertificateChain()", dbgHint); if( isDebugging ) breakpoint(); return delegate.peerCertificateChain(); } @Override public String absoluteURI() { + log.trace("{}: absoluteURI()", dbgHint); if( isDebugging ) breakpoint(); return delegate.absoluteURI(); } @Override public HttpServerRequest bodyHandler(Handler bodyHandler) { + log.trace("{}: bodyHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.bodyHandler(bodyHandler); } @Override public HttpServerRequest body(Handler> handler) { + log.trace("{}: body(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.body(handler); } @Override public Future body() { + log.trace("{}: body(void)", dbgHint); if( isDebugging ) breakpoint(); return delegate.body(); } @Override public void end(Handler> handler) { + log.trace("{}: end(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); delegate.end(handler); } @Override public Future end() { + log.trace("{}: end(void)", dbgHint); if( isDebugging ) breakpoint(); return delegate.end(); } @Override public void toNetSocket(Handler> handler) { + log.trace("{}: toNetSocket(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); delegate.toNetSocket(handler); } @Override public Future toNetSocket() { + log.trace("{}: toNetSocket(void)", dbgHint); if( isDebugging ) breakpoint(); return delegate.toNetSocket(); } @Override public HttpServerRequest setExpectMultipart(boolean expect) { + log.trace("{}: toNetSocket({})", dbgHint, expect); if( isDebugging ) breakpoint(); return delegate.setExpectMultipart(expect); } @Override public boolean isExpectMultipart() { + log.trace("{}: isExpectMultipart()", dbgHint); if( isDebugging ) breakpoint(); return delegate.isExpectMultipart(); } @Override public HttpServerRequest uploadHandler(Handler uploadHandler) { + log.trace("{}: uploadHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.uploadHandler(uploadHandler); } @Override public MultiMap formAttributes() { + log.trace("{}: formAttributes()", dbgHint); if( isDebugging ) breakpoint(); return delegate.formAttributes(); } @Override public String getFormAttribute(String attributeName) { + log.trace("{}: getFormAttribute(\"{}\")", dbgHint, attributeName); if( isDebugging ) breakpoint(); return 
delegate.getFormAttribute(attributeName); } @Override public int streamId() { + log.trace("{}: streamId()", dbgHint); if( isDebugging ) breakpoint(); return delegate.streamId(); } @Override public void toWebSocket(Handler> handler) { + log.trace("{}: toWebSocket(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); delegate.toWebSocket(handler); } @Override public Future toWebSocket() { + log.trace("{}: toWebSocket()", dbgHint); if( isDebugging ) breakpoint(); return delegate.toWebSocket(); } @Override public boolean isEnded() { + log.trace("{}: isEnded()", dbgHint); if( isDebugging ) breakpoint(); return delegate.isEnded(); } @Override public HttpServerRequest customFrameHandler(Handler handler) { + log.trace("{}: customFrameHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.customFrameHandler(handler); } @Override public HttpConnection connection() { + log.trace("{}: connection()", dbgHint); if( isDebugging ) breakpoint(); return delegate.connection(); } @Override public StreamPriority streamPriority() { + log.trace("{}: streamPriority()", dbgHint); if( isDebugging ) breakpoint(); return delegate.streamPriority(); } @Override public HttpServerRequest streamPriorityHandler(Handler handler) { + log.trace("{}: streamPriorityHandler(Hdlr)", dbgHint); if( isDebugging ) breakpoint(); return delegate.streamPriorityHandler(handler); } @Override public DecoderResult decoderResult() { + log.trace("{}: decoderResult()", dbgHint); if( isDebugging ) breakpoint(); return delegate.decoderResult(); } @Override public Cookie getCookie(String name) { + log.trace("{}: getCookie(\"{}\")", dbgHint, name); if( isDebugging ) breakpoint(); return delegate.getCookie(name); } @Override public Cookie getCookie(String name, String domain, String path) { + log.trace("{}: getCookie(\"{}\", Str, Str)", dbgHint, name); if( isDebugging ) breakpoint(); return delegate.getCookie(name, domain, path); } @Override public int cookieCount() { + log.trace("{}: cookieCount()", dbgHint); if( isDebugging ) breakpoint(); return delegate.cookieCount(); } @@ -353,54 +411,63 @@ public class DelegateVertxHttpServerRequestInternal implements HttpServerRequest @Override @Deprecated public Map cookieMap() { + log.trace("{}: cookieMap()", dbgHint); if( isDebugging ) breakpoint(); return delegate.cookieMap(); } @Override public Set cookies(String name) { + log.trace("{}: cookies(\"{}\")", dbgHint, name); if( isDebugging ) breakpoint(); return delegate.cookies(name); } @Override public Set cookies() { + log.trace("{}: cookies(void)", dbgHint); if( isDebugging ) breakpoint(); return delegate.cookies(); } @Override public HttpServerRequest routed(String route) { + log.trace("{}: routed(\"{}\")", dbgHint, route); if( isDebugging ) breakpoint(); return delegate.routed(route); } @Override public Pipe pipe() { + log.trace("{}: pipe()", dbgHint); if( isDebugging ) breakpoint(); return delegate.pipe(); } @Override public Future pipeTo(WriteStream dst) { + log.trace("{}: pipeTo(WrStrm)", dbgHint); if( isDebugging ) breakpoint(); return delegate.pipeTo(dst); } @Override public void pipeTo(WriteStream dst, Handler> handler) { + log.trace("{}: pipeTo(WrStrm,Hdlr)", dbgHint); if( isDebugging ) breakpoint(); delegate.pipeTo(dst, handler); } @Override public Context context() { + log.trace("{}: context()", dbgHint); if( isDebugging ) breakpoint(); return delegate.context(); } @Override public Object metric() { + log.trace("{}: metric()", dbgHint); if( isDebugging ) breakpoint(); return delegate.metric(); } diff --git 
a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java index eadca9a..8295088 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java @@ -11,7 +11,7 @@ import static java.lang.System.currentTimeMillis; public class Foo { private static final Logger log = Foo.getLogger(Foo.class); - private static final boolean assertRequestEquality = true; + private static final boolean assertRequestEquality = false; private static HttpServerRequest serverInfoRequest; private static io.vertx.core.http.impl.HttpServerRequestInternal restStorageEvBusAdaptMappdHttpServReq; private static long onBeginRouteEpochMs; diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java new file mode 100644 index 0000000..a011c7f --- /dev/null +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java @@ -0,0 +1,265 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123; + +import io.vertx.core.http.HttpMethod; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.http.HttpServerResponse; +import org.slf4j.Logger; + +import java.lang.reflect.Field; +import java.util.NoSuchElementException; + +import static java.lang.System.currentTimeMillis; +import static org.slf4j.LoggerFactory.getLogger; + + +/** + *

This class got introduced to trace timings of "/houston/server/info"
+ * requests. It is optimized for exactly this purpose AND NOTHING ELSE! It was
+ * introduced because SDCISA-13746 is only observable on PROD: it does not
+ * reproduce locally, nor on TEST, INT or PREPROD. So we have little choice but
+ * to trace this bug down directly on PROD itself. Unluckily that is not simple
+ * to do. First, debugging/testing on the PROD env always carries some risk.
+ * Second, our feedback loop is terribly slow due to our heavyweight deployment
+ * process. So to see whether this code actually does what it should, we will
+ * likely have to wait up to SEVERAL MONTHS.
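+ *
+ * <p>Rough sketch of the intended call order; the host wiring shown here is
+ * an assumption for illustration only, not part of this class:</p>
+ * <pre>
+ *   tracer.onNewHttpRequest(req);                 // request accepted, slot allocated
+ *   tracer.onAuthorizerBegin(req);
+ *   tracer.onAuthorizerEnd(req);
+ *   req = tracer.filterRequestBeforeCallingCatchallRouter(req);
+ *   tracer.onWritingHttpResponseBegin(req);
+ *   tracer.onWritingHttpResponseHasReturned(req);
+ *   tracer.onWritingHttpResponseEnd(null, req);   // slot freed, timings reported
+ * </pre>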

+ */ +public class HoustonInfoRequestTracer implements org.swisspush.gateleen.core.debug.InfoRequestTracer { + + private static final Logger log = getLogger(HoustonInfoRequestTracer.class); + private static final String INFO_URI = "/houston/server/info"; + private static final int MAX_REQUESTS = 8; /*WARN: do NOT go too high*/ + private static final Long NO_VALUE = Long.MIN_VALUE / 2; + private static final Class wrapperClazz; + private static final Field delegateField; + private static final int + FLG_WritingHttpResponseHasReturned = 1 << 0, + FLG_WritingHttpResponseEnd = 1 << 1, + FLG_slotIsBusy = 1 << 2; + private final int requestDurationBailTresholdLowMs = 42; /* requests faster than 42 millis likely not interesting*/ + private final Object requestSlotLock = new Object(); + private final HttpServerRequest[] + requestInstances = new HttpServerRequest[MAX_REQUESTS]; + private int slotReUseOffset; + private final int[] + requestFlg = new int[MAX_REQUESTS]; + private final long[] + requestNewHttpReqEpochMs = new long[MAX_REQUESTS], + authorizerBeginMs = new long[MAX_REQUESTS], + authorizerEndMs = new long[MAX_REQUESTS], + beforeCatchallRouting = new long[MAX_REQUESTS], + responseGotRequestedMs = new long[MAX_REQUESTS], + writingResponseBeginMs = new long[MAX_REQUESTS], + writingResponseHasReturnedMs = new long[MAX_REQUESTS], + writingResponseEndMs = new long[MAX_REQUESTS], + requestDoneMs = new long[MAX_REQUESTS]; + + static { + try { + wrapperClazz = Class.forName("io.vertx.ext.web.impl.HttpServerRequestWrapper"); + delegateField = wrapperClazz.getDeclaredField("delegate"); + delegateField.setAccessible(true); + } catch (ClassNotFoundException | NoSuchFieldException ex) { + assert false : "TODO_395w8zuj"; + throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex); + } + } + + public void onNewHttpRequest(HttpServerRequest req) { + if( !isOfInterestEvenReqNotYetSeen(req) ) return; + req = unwrap(req); + int reqIdx; + synchronized (requestSlotLock){ + reqIdx = getFreeSlotIdx(); + if( reqIdx == -2 ) { + log.debug("No more space to trace yet another request"); + return; + } + assert reqIdx >= 0 && reqIdx < MAX_REQUESTS; + assert !alreadyKnowRequest(req) : "TODO what if.."; + requestFlg[reqIdx] = FLG_slotIsBusy; + } + requestInstances[reqIdx] = req; + requestNewHttpReqEpochMs[reqIdx] = currentTimeMillis(); + authorizerBeginMs[reqIdx] = NO_VALUE; + authorizerEndMs[reqIdx] = NO_VALUE; + beforeCatchallRouting[reqIdx] = NO_VALUE; + responseGotRequestedMs[reqIdx] = NO_VALUE; + writingResponseBeginMs[reqIdx] = NO_VALUE; + writingResponseHasReturnedMs[reqIdx] = NO_VALUE; + writingResponseEndMs[reqIdx] = NO_VALUE; + requestDoneMs[reqIdx] = NO_VALUE; + } + + public void onHttpRequestError(HttpServerRequest req, Throwable ex) { + if( !isOfInterest(req) ) return; + int reqIdx = getIdxOf(req); + long durMs = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + throw new UnsupportedOperationException(/*TODO*/"Not impl yet. 
Took "+durMs+"ms", ex); + } + + public void onAuthorizerBegin(HttpServerRequest req) { + if( !isOfInterest(req) ) return; + int reqIdx = getIdxOf(req); + authorizerBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + } + + public void onAuthorizerEnd(HttpServerRequest req) { + if( !isOfInterest(req) ) return; + int reqIdx = getIdxOf(req); + authorizerEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + } + + public HttpServerRequest filterRequestBeforeCallingCatchallRouter(HttpServerRequest req) { + if( !isOfInterest(req) ) return req; + int reqIdx = getIdxOf(req); + beforeCatchallRouting[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + return new InterceptingServerRequest("ai9oh8urtgj", req); + } + + private void onHttpResponseGotRequested(HttpServerRequest req) { + assert isOfInterest(req); + int reqIdx = getIdxOf(req); + responseGotRequestedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + } + + public void onWritingHttpResponseBegin(HttpServerRequest req) { + int reqIdx = getIdxOf(req); + writingResponseBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + } + + public void onWritingHttpResponseHasReturned(HttpServerRequest req) { + assert isOfInterest(req); + int reqIdx = getIdxOf(req); + writingResponseHasReturnedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + requestFlg[reqIdx] |= FLG_WritingHttpResponseHasReturned; + tryCompletingRequest(reqIdx); + } + + public void onWritingHttpResponseEnd(Throwable ex, HttpServerRequest req) { + assert ex == null; + assert isOfInterest(req); + int reqIdx = getIdxOf(req); + writingResponseEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + requestFlg[reqIdx] |= FLG_WritingHttpResponseEnd; + tryCompletingRequest(reqIdx); + } + + private void tryCompletingRequest(int reqIdx) { + int requestIsDoneMask = FLG_WritingHttpResponseHasReturned | FLG_WritingHttpResponseEnd; + if ((requestFlg[reqIdx] & requestIsDoneMask) != requestIsDoneMask) return; + requestDoneMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx]; + report(reqIdx); + /* free our slot */ + synchronized (requestSlotLock){ + requestFlg[reqIdx] &= ~FLG_slotIsBusy; + requestInstances[reqIdx] = null; + } + } + + private void report(int reqIdx) { + if( requestDoneMs[reqIdx] < requestDurationBailTresholdLowMs ){ + /*fast requests usually are not worth logging, we're interested in the slow requests only*/ + if (log.isTraceEnabled()) log.trace( + "Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)", + requestDoneMs[reqIdx], + authorizerBeginMs[reqIdx], + authorizerEndMs[reqIdx], + beforeCatchallRouting[reqIdx], + responseGotRequestedMs[reqIdx], + writingResponseBeginMs[reqIdx], + writingResponseHasReturnedMs[reqIdx], + writingResponseEndMs[reqIdx]); + }else{ + /*slow requests are interesting*/ + log.info("Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)", + requestDoneMs[reqIdx], + authorizerBeginMs[reqIdx], + authorizerEndMs[reqIdx], + beforeCatchallRouting[reqIdx], + responseGotRequestedMs[reqIdx], + writingResponseBeginMs[reqIdx], + writingResponseHasReturnedMs[reqIdx], + writingResponseEndMs[reqIdx]); + } + } + + private boolean isOfInterest(HttpServerRequest req){ + if( !isOfInterestEvenReqNotYetSeen(req) ) return false; + if( !alreadyKnowRequest(req) ) return false; // Without start point, we cannot report anything useful + 
return true; + } + + private boolean isOfInterestEvenReqNotYetSeen(HttpServerRequest req) { + if( !log.isInfoEnabled() ) return false; // if we produce no output, makes no sense to burn CPU for it + if( !HttpMethod.GET.equals(req.method()) ) return false; // Only GET is interesting for us + if( !INFO_URI.equals(req.uri()) ) return false; // Only this specific URI is of interest + return true; + } + + private int getIdxOf(HttpServerRequest req) { + req = unwrap(req); + for( int idx = 0 ; idx < MAX_REQUESTS ; ++idx ){ + if( requestInstances[idx] == req ) return idx; + } + assert false : "why does this happen?"; + throw new NoSuchElementException(/*TODO*/"Not impl yet"); + } + + /** @return either index of free slot or -2 if no slot available */ + private int getFreeSlotIdx() { + for( int i = 0 ; i < MAX_REQUESTS ; ++i ){ + if( (requestFlg[i+slotReUseOffset%MAX_REQUESTS] & FLG_slotIsBusy) == 0 ) { + slotReUseOffset = i + 1; + return i; + } + } + return -2; + } + + private boolean alreadyKnowRequest(HttpServerRequest req) { + req = unwrap(req); + for( int i = 0 ; i < (0 + MAX_REQUESTS) ; ++i ){ + if((requestFlg[i] & FLG_slotIsBusy) == 0) continue; + if( requestInstances[i] == req ) return true; + } + return false; + } + + private HttpServerRequest unwrap(HttpServerRequest req){ + for( boolean hasChanged = true ; hasChanged ;){ + hasChanged = false; + while (req instanceof InterceptingServerRequest) { + hasChanged = true; + req = ((InterceptingServerRequest) req).delegate; + } + while(wrapperClazz.isInstance(req)){ + hasChanged = true; + try { + req = (HttpServerRequest) delegateField.get(req); + } catch (IllegalAccessException ex) { + throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex); + } + } + } + assert req != null; + return req; + } + + private class InterceptingServerRequest extends DelegateVertxHttpServerRequestInternal { + private final HttpServerRequest delegate; + + public InterceptingServerRequest(String debugHint, HttpServerRequest delegate) { + super(debugHint, delegate); + assert isOfInterest(delegate); + this.delegate = delegate; + } + + @Override public HttpServerResponse response() { + assert isOfInterest(delegate); + onHttpResponseGotRequested(delegate); + return super.response(); + } + } + + +} -- cgit v1.1 From bff6093c6987d9d0e6853295318b75c57bb221cf Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 2 Feb 2024 16:59:59 +0100 Subject: Introduce RmArtifactBaseDir.lua --- src/main/lua/paisa-fleet/RmArtifactBaseDir.lua | 258 +++++++++++++++++++++++++ 1 file changed, 258 insertions(+) create mode 100644 src/main/lua/paisa-fleet/RmArtifactBaseDir.lua diff --git a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua new file mode 100644 index 0000000..a30ff6a --- /dev/null +++ b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua @@ -0,0 +1,258 @@ + +local SL = require("scriptlee") +local newHttpClient = SL.newHttpClient +local newShellcmd = SL.newShellcmd +local newSqlite = SL.newSqlite +local objectSeal = SL.objectSeal +local parseJSON = SL.parseJSON +local sleep = SL.posix.sleep +local startOrExecute = SL.reactor.startOrExecute +SL = nil +local log = io.stdout + + +function printHelp() + io.write("\n" + .." WARN: This is experimental.\n" + .." \n" + .." Options:\n" + .." --backendHost (eg \"localhost\")\n" + .." --backendPort (eg 80)\n" + .." --backendPath (eg \"/houston\")\n" + .." --sshPort (eg 22)\n" + .." --sshUser (eg \"eddieuser\")\n" + .." --state (eg \"path/to/state\")\n" + .." 
\n") +end + + +function parseArgs( app ) + app.backendPort = 80 + app.statePath = ":memory:" + local iA = 0 + ::nextArg:: + iA = iA + 1 + local arg = _ENV.arg[iA] + if not arg then + goto verifyResult + elseif arg == "--help" then + app.isHelp = true return 0 + elseif arg == "--backendHost" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --backendHost needs value\n")return end + app.backendHost = arg + elseif arg == "--backendPort" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --backendPort needs value\n")return end + app.backendHost = arg + elseif arg == "--backendPath" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --backendPath needs value\n")return end + app.backendPath = arg + elseif arg == "--sshPort" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --sshPort needs value\n")return end + app.sshPort = arg + elseif arg == "--sshUser" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --sshUser needs value\n")return end + app.sshUser = arg + elseif arg == "--state" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --state needs value\n")return end + app.statePath = arg + end + goto nextArg + ::verifyResult:: + if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end + if not app.backendPath then log:write("EINVAL: --backendPath missing\n")return end + if app.backendPath:find("^C:.") then log:write("WARN: Path looks wrong: ".. app.backendPath.."\n") end + return 0 +end + + +function removeCompletedEddies( app ) + local db = getStateDb(app) + local rs = db:prepare("SELECT eddieName FROM CompletedEddies;"):execute() + local eddieNamesToRemoveSet = {} + while rs:next() do + assert(rs:type(1) == "TEXT", rs:type(1)) + assert(rs:name(1) == "eddieName", rs:name(1)) + local eddieName = rs:value(1) + eddieNamesToRemoveSet[eddieName] = true + end + local oldEddies = app.eddies + app.eddies = {} + local numKeep, numDrop = 0, 0 + for _, eddie in pairs(oldEddies) do + if not eddieNamesToRemoveSet[eddie.eddieName] then + --log:write("[DEBUG] Keep '".. eddie.eddieName .."'\n") + numKeep = numKeep + 1 + table.insert(app.eddies, eddie) + else + numDrop = numDrop + 1 + --log:write("[DEBUG] Drop '".. eddie.eddieName .."': Already done\n") + end + end + log:write("[DEBUG] todo: ".. numKeep ..", done: ".. numDrop .."\n") +end + + +function markEddieDone( app, eddieName ) + assert(type(app) == "table") + assert(type(eddieName) == "string") + log:write("[DEBUG] markEddieDone(".. eddieName ..")\n") + local db = getStateDb(app) + local stmt = db:prepare("INSERT OR IGNORE INTO CompletedEddies(eddieName,doneAt)VALUES($eddieName, $now)") + stmt:reset() + stmt:bind("$eddieName", eddieName) + stmt:bind("$now", os.date("!%Y-%m-%dT%H:%M:%S+00:00")) + stmt:execute() +end + + +function getStateDb( app ) + if not app.stateDb then + app.stateDb = newSqlite{ database = app.statePath } + app.stateDb:prepare("CREATE TABLE IF NOT EXISTS CompletedEddies(" + .." eddieName TEXT UNIQUE," + .." 
doneAt TEXT);"):execute() + end + return app.stateDb +end + + +function loadEddies( app ) + local httpClient = newHttpClient{} + local req = objectSeal{ + base = false, + rspCode = false, + rspBody = false, + isDone = false, + } + req.base = httpClient:request{ + cls = req, + host = app.backendHost, port = app.backendPort, + method = "GET", url = app.backendPath .."/data/preflux/inventory", + onRspHdr = function( rspHdr, req ) + req.rspCode = rspHdr.status + if rspHdr.status ~= 200 then + log:write(".-----------------------------------------\n") + log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n") + for i,h in ipairs(rspHdr.headers) do + log:write("| ".. h[1] ..": ".. h[2] .."\n") + end + log:write("| \n") + end + end, + onRspChunk = function( buf, req ) + if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end + if buf then + if not req.rspBody then req.rspBody = buf + else req.rspBody = req.rspBody .. buf end + end + end, + onRspEnd = function( req ) + if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end + req.isDone = true + end, + } + req.base:closeSnk() + assert(req.isDone) + local prefluxInventory = parseJSON(req.rspBody) + local eddies = {} + for eddieName, detail in pairs(prefluxInventory.hosts) do + table.insert(eddies, objectSeal{ + eddieName = eddieName, + lastSeen = detail.lastSeen:value(), + }) + end + app.eddies = eddies +end + + +function makeWhateverWithEddies( app ) + local cmdLinePre = "ssh -oConnectTimeout=5" + if app.sshPort then cmdLinePre = cmdLinePre .." -p".. app.sshPort end + if app.sshUser then cmdLinePre = cmdLinePre .." \"-oUser=".. app.sshUser .."\"" end + for k,eddie in pairs(app.eddies) do + local eddieName = eddie.eddieName + local isEddie = eddieName:find("^eddie%d%d%d%d%d$") + local isTeddie = eddieName:find("^teddie%d%d$") + local isVted = eddieName:find("^vted%d%d$") + local isAws = eddieName:find("^10.117.%d+.%d+$") + local isDevMachine = eddieName:find("^w00[a-z0-9][a-z0-9][a-z0-9]$") + if isAws or isDevMachine or isVted then + log:write("[DEBUG] Skip \"".. eddieName .."\"\n") + goto nextEddie + end + assert(isEddie or isTeddie, eddieName or"nil") + local okMarker = "OK_".. math.random(10000000, 99999999) .."wCAkgQQA2AJAzAIA" + local cmdLine = cmdLinePre .." ".. eddieName + -- report only + --.." \"-oRemoteCommand=test -e /data/instances/default && ls -Ahl /data/instances/default\"" + -- DELETE them + .." \"-oRemoteCommand=true" + .. " && if test -e /data/instances/default/\\${ARTIFACT_BASE_DIR}; then true" + .. " && find /data/instances/default/\\${ARTIFACT_BASE_DIR} -type d -mtime +420 -print -delete" + .. " ;fi" + .. " && echo ".. okMarker .."" + .. " \"" + log:write("\n[DEBUG] ".. 
cmdLine.."\n") + log:write("[DEBUG] sleep ...\n")sleep(3) + local isCmdDone, isSuccess = false, false + local cmd = newShellcmd{ + cmdLine = cmdLine, + onStdout = function( buf ) + if buf then + if buf:find("\n"..okMarker.."\n",0,true) then isSuccess = true end + io.stdout:write(buf) + else isCmdDone = true end + end, + } + cmd:start() + cmd:closeSnk() + local exitCode, signal = cmd:join(42) + log:write("[DEBUG] code="..tostring(exitCode)..", signal="..tostring(signal).."\n") + while not isCmdDone do sleep(0.042) end + if not isSuccess then log:write("[WARN ] Failed on '"..eddieName.."'\n") goto nextEddie end + markEddieDone(app, eddieName) + ::nextEddie:: + end +end + + +function sortEddiesMostRecentlySeenFirst( app ) + table.sort(app.eddies, function(a, b) return a.lastSeen > b.lastSeen end) +end + + +function run( app ) + loadEddies(app) + assert(app.eddies) + removeCompletedEddies(app) + sortEddiesMostRecentlySeenFirst(app) + makeWhateverWithEddies(app) +end + + +function main() + local app = objectSeal{ + isHelp = false, + backendHost = false, + backendPort = false, + backendPath = false, + sshPort = false, + sshUser = false, + statePath = false, + stateDb = false, + eddies = false, + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +startOrExecute(main) + -- cgit v1.1 From eec1723ca121e33a3c4dd7307ac714d555b6cb5c Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 3 Feb 2024 19:36:55 +0100 Subject: Add performance and cleanup hint in windoof setup --- doc/note/qemu/setup-windoof.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/note/qemu/setup-windoof.txt b/doc/note/qemu/setup-windoof.txt index c301416..4428ba3 100644 --- a/doc/note/qemu/setup-windoof.txt +++ b/doc/note/qemu/setup-windoof.txt @@ -24,8 +24,11 @@ Install needed software (Maybe: firefox, MsOffice, MsTeams, ..?). Manually trigger updates, reboot, updates, reboot, (likely some more turns ...) +Configure Performance options. Disable all but screen fonts. + Make sure no more updates are running. Then, I guess best is to reboot without -internet access once more to cleanup the disk: +internet access once more to cleanup the disk. Delete unused files like +trashcan or downloaded installers: SDelete.exe -nobanner -z C: -- cgit v1.1 From dcaf70491ae3464058462a82179d313dc222da95 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 6 Feb 2024 16:48:07 +0100 Subject: Fuck spotless in houston --- src/main/patch/houston/default.patch | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch index 52017d2..4169156 100644 --- a/src/main/patch/houston/default.patch +++ b/src/main/patch/houston/default.patch @@ -18,6 +18,32 @@ index 0ed4f7f3..b44c5693 100644 9.4.43.v20210629 +@@ -301,4 +301,25 @@ + + + ++ ++ ++ ++ com.diffplug.spotless ++ spotless-maven-plugin ++ ++ ++ spotless-apply ++ none ++ ++ ++ spotless-check ++ none ++ ++ ++ ++ true ++ ++ ++ ++ +
diff --git a/houston-process/pom.xml b/houston-process/pom.xml index 374dcb97..3c24937c 100644 --- a/houston-process/pom.xml -- cgit v1.1 From 9e085130df0a843166f27a4b0248fb5b3cb34468 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 7 Feb 2024 17:00:04 +0100 Subject: mvn deploy notes. Lua FleetHelpr. --- doc/note/maven/maven.txt | 7 + src/main/lua/paisa-fleet/RmArtifactBaseDir.lua | 177 +++++++++++++++++++++---- 2 files changed, 156 insertions(+), 28 deletions(-) diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index cdfdd9a..309fa63 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -23,6 +23,13 @@ mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://art mvn deploy -Dcmake.generate.skip=true -Dcmake.compile.skip=true -DaltDeploymentRepository=artifactory-releases::default::https://artifactory.tools.pnet.ch/artifactory/libs-release-local +true `# Deploy custom build 20240206` \ + && DEPLOPTS= \ + && mvn clean install -pl '!gateleen-hook-js,!gateleen-playground' \ + && mvn deploy -DskipTests -pl '!gateleen-hook-js,!gateleen-playground' $DEPLOPTS \ + && true + + ## Run e2e locally mvn verify -U -DSelBaseUrl=http://localhost:7012/apigateway/services/foo/index.html -Dskip.tests=false -Dserver.host=localhost -Dserver.port=7012 -Ptestsuite diff --git a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua index a30ff6a..4eae033 100644 --- a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua +++ b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua @@ -22,6 +22,8 @@ function printHelp() .." --sshPort (eg 22)\n" .." --sshUser (eg \"eddieuser\")\n" .." --state (eg \"path/to/state\")\n" + .." \n" + .." --exportLatestStatus\n" .." \n") end @@ -61,19 +63,27 @@ function parseArgs( app ) iA = iA + 1; arg = _ENV.arg[iA] if not arg then log:write("EINVAL: --state needs value\n")return end app.statePath = arg + elseif arg == "--exportLatestStatus" then + app.exportLatestStatus = true end goto nextArg ::verifyResult:: - if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end - if not app.backendPath then log:write("EINVAL: --backendPath missing\n")return end - if app.backendPath:find("^C:.") then log:write("WARN: Path looks wrong: ".. app.backendPath.."\n") end + if app.exportLatestStatus then + if not app.statePath then log:write("EINVAL: --state missing\n")return end + else + if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end + if not app.backendPath then log:write("EINVAL: --backendPath missing\n")return end + if app.backendPath:find("^C:.") then log:write("[WARN ] MSYS_NO_PATHCONV=1 likely missing? ".. app.backendPath.."\n") end + end return 0 end function removeCompletedEddies( app ) local db = getStateDb(app) - local rs = db:prepare("SELECT eddieName FROM CompletedEddies;"):execute() + local rs = db:prepare("SELECT eddieName FROM Eddie" + .." JOIN EddieLog ON Eddie.id = eddieId" + .." WHERE status = \"OK\";"):execute() local eddieNamesToRemoveSet = {} while rs:next() do assert(rs:type(1) == "TEXT", rs:type(1)) @@ -98,15 +108,28 @@ function removeCompletedEddies( app ) end -function markEddieDone( app, eddieName ) +function setEddieStatus( app, statusStr, eddieName, stderrStr, stdoutStr ) assert(type(app) == "table") assert(type(eddieName) == "string") - log:write("[DEBUG] markEddieDone(".. eddieName ..")\n") + assert(statusStr == "OK" or statusStr == "ERROR") + log:write("[DEBUG] setEddieStatus(".. eddieName ..", ".. 
statusStr ..")\n") local db = getStateDb(app) - local stmt = db:prepare("INSERT OR IGNORE INTO CompletedEddies(eddieName,doneAt)VALUES($eddieName, $now)") + local stmt = db:prepare("INSERT INTO Eddie(eddieName)VALUES($eddieName);") + stmt:bind("$eddieName", eddieName) + local ok, emsg = xpcall(function() + stmt:execute() + end, debug.traceback) + if not ok and not emsg:find("UNIQUE constraint failed: Eddie.eddieName") then + error(emsg) + end + local stmt = db:prepare("INSERT INTO EddieLog('when',eddieId,status,stderr,stdout)" + .."VALUES($when, (SELECT rowid FROM Eddie WHERE eddieName = $eddieName), $status, $stderr, $stdout)") stmt:reset() + stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%S+00:00")) stmt:bind("$eddieName", eddieName) - stmt:bind("$now", os.date("!%Y-%m-%dT%H:%M:%S+00:00")) + stmt:bind("$status", statusStr) + stmt:bind("$stderr", stderrStr) + stmt:bind("$stdout", stdoutStr) stmt:execute() end @@ -114,9 +137,18 @@ end function getStateDb( app ) if not app.stateDb then app.stateDb = newSqlite{ database = app.statePath } - app.stateDb:prepare("CREATE TABLE IF NOT EXISTS CompletedEddies(" - .." eddieName TEXT UNIQUE," - .." doneAt TEXT);"):execute() + app.stateDb:prepare("CREATE TABLE IF NOT EXISTS Eddie(\n" + .." id INTEGER PRIMARY KEY,\n" + .." eddieName TEXT UNIQUE NOT NULL)\n" + ..";"):execute() + app.stateDb:prepare("CREATE TABLE IF NOT EXISTS EddieLog(\n" + .." id INTEGER PRIMARY KEY,\n" + .." 'when' TEXT NOT NULL,\n" + .." eddieId INT NOT NULL,\n" + .." status TEXT, -- OneOf OK, ERROR\n" + .." stderr TEXT NOT NULL,\n" + .." stdout TEXT NOT NULL)\n" + ..";\n"):execute() end return app.stateDb end @@ -126,6 +158,8 @@ function loadEddies( app ) local httpClient = newHttpClient{} local req = objectSeal{ base = false, + method = "GET", + path = app.backendPath .."/data/preflux/inventory", rspCode = false, rspBody = false, isDone = false, @@ -133,11 +167,14 @@ function loadEddies( app ) req.base = httpClient:request{ cls = req, host = app.backendHost, port = app.backendPort, - method = "GET", url = app.backendPath .."/data/preflux/inventory", + method = req.method, url = req.path, onRspHdr = function( rspHdr, req ) req.rspCode = rspHdr.status if rspHdr.status ~= 200 then log:write(".-----------------------------------------\n") + log:write("| ".. req.method .." ".. req.path .."\n") + log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n") + log:write("+-----------------------------------------\n") log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n") for i,h in ipairs(rspHdr.headers) do log:write("| ".. h[1] ..": ".. h[2] .."\n") @@ -159,6 +196,7 @@ function loadEddies( app ) } req.base:closeSnk() assert(req.isDone) + if req.rspCode ~= 200 then log:write("ERROR: Couldn't load eddies\n")return end local prefluxInventory = parseJSON(req.rspBody) local eddies = {} for eddieName, detail in pairs(prefluxInventory.hosts) do @@ -172,7 +210,8 @@ end function makeWhateverWithEddies( app ) - local cmdLinePre = "ssh -oConnectTimeout=5" + local ssh = "C:/Users/fankhauseand/.opt/gitPortable-2.27.0-x64/usr/bin/ssh.exe" + local cmdLinePre = ssh .." -oConnectTimeout=3 -oRemoteCommand=none" if app.sshPort then cmdLinePre = cmdLinePre .." -p".. app.sshPort end if app.sshUser then cmdLinePre = cmdLinePre .." \"-oUser=".. app.sshUser .."\"" end for k,eddie in pairs(app.eddies) do @@ -189,34 +228,59 @@ function makeWhateverWithEddies( app ) assert(isEddie or isTeddie, eddieName or"nil") local okMarker = "OK_".. 
math.random(10000000, 99999999) .."wCAkgQQA2AJAzAIA" local cmdLine = cmdLinePre .." ".. eddieName - -- report only - --.." \"-oRemoteCommand=test -e /data/instances/default && ls -Ahl /data/instances/default\"" - -- DELETE them - .." \"-oRemoteCommand=true" - .. " && if test -e /data/instances/default/\\${ARTIFACT_BASE_DIR}; then true" - .. " && find /data/instances/default/\\${ARTIFACT_BASE_DIR} -type d -mtime +420 -print -delete" + .." -- \"true" + .. " && if test \"".. eddieName .."\" != \"$(hostname|sed 's,.pnet.ch$,,'); then true\"" + .. " && echo WrongHost expected=".. eddieName .." actual=$(hostname|sed 's,.pnet.ch$,,') && false" .. " ;fi" - .. " && echo ".. okMarker .."" + .. " && echo hostname=$(hostname|sed 's,.pnet.ch,,')" + .. " && echo stage=${PAISA_ENV:?}" + .. " && echo Scan /data/instances/default/??ARTIFACT_BASE_DIR?" + --[[report only]] + --.. " && test -e /data/instances/default/??ARTIFACT_BASE_DIR? && ls -Ahl /data/instances/default/??ARTIFACT_BASE_DIR?" + --[[Find un-/affected eddies]] + .. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true" + .. " ;else true" + .. " && echo ".. okMarker + .. " ;fi" + --[[DELETE them]] + --.. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true" + --.. " && find /data/instances/default/??ARTIFACT_BASE_DIR? -type d -mtime +420 -print -delete" + --.. " ;fi" + --.. " && echo ".. okMarker .."" + --[[]] .. " \"" - log:write("\n[DEBUG] ".. cmdLine.."\n") - log:write("[DEBUG] sleep ...\n")sleep(3) - local isCmdDone, isSuccess = false, false + log:write("\n") + log:write("[INFO ] Try ".. eddieName .." ...\n") + log:write("[DEBUG] ".. cmdLine.."\n") + --log:write("[DEBUG] sleep ...\n")sleep(3) + local isStdioDone, isSuccess, stderrStr, stdoutStr = false, false, "", "" local cmd = newShellcmd{ cmdLine = cmdLine, onStdout = function( buf ) if buf then if buf:find("\n"..okMarker.."\n",0,true) then isSuccess = true end + stdoutStr = stdoutStr .. buf io.stdout:write(buf) - else isCmdDone = true end + else isStdioDone = true end + end, + onStderr = function( buf ) + stderrStr = buf and stderrStr .. buf or stderrStr + io.stderr:write(buf or"") end, } cmd:start() cmd:closeSnk() local exitCode, signal = cmd:join(42) - log:write("[DEBUG] code="..tostring(exitCode)..", signal="..tostring(signal).."\n") - while not isCmdDone do sleep(0.042) end - if not isSuccess then log:write("[WARN ] Failed on '"..eddieName.."'\n") goto nextEddie end - markEddieDone(app, eddieName) + if exitCode ~= 0 and signal ~= nil then + log:write("[WARN ] code="..tostring(exitCode)..", signal="..tostring(signal).."\n") + end + while not isStdioDone do sleep(0.042) end + -- Analyze outcome + if not isSuccess then + setEddieStatus(app, "ERROR", eddieName, stderrStr, stdoutStr) + goto nextEddie + end + setEddieStatus(app, "OK", eddieName, stderrStr, stdoutStr) ::nextEddie:: end end @@ -227,7 +291,63 @@ function sortEddiesMostRecentlySeenFirst( app ) end +function quoteCsvVal( v ) + local typ = type(v) + if false then + elseif typ == "string" then + if v:find("[\"\r\n]",0,false) then + v = '"'.. v:gsub('"', '""') ..'"' + end + else error("TODO_a928rzuga98oirh "..typ)end + return v +end + + +function exportLatestStatus( app ) + local snk = io.stdout + local db = getStateDb(app) + local stmt = db:prepare("SELECT \"when\",eddieName,status,stderr,stdout FROM EddieLog" + .." JOIN Eddie ON Eddie.id = eddieId" + .." ORDER BY eddieId,[when]" + .." 
;") + rs = stmt:execute() + snk:write("c;when;eddieName;status;stderr;stdout\n") + local prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout + local qt = quoteCsvVal + while rs:next() do + local when , eddieName , status , stderr , stdout + = rs:value(1), rs:value(2), rs:value(3), rs:value(4), rs:value(5) + --log:write("[DEBUG] "..tostring(when).." "..tostring(eddieName).." "..tostring(status).."\n") + assert(when and eddieName and status and stderr and stdout) + if eddieName == prevEddieName then + if not prevWhen or when > prevWhen then + --log:write("[DEBUG] ".. when .." ".. eddieName .." take\n") + goto assignPrevThenNextEntry + else + --log:write("[DEBUG] ".. when .." ".. eddieName .." obsolete\n") + goto nextEntry + end + elseif prevEddieName then + --log:write("[DEBUG] ".. when .." ".. eddieName .." Eddie complete\n") + snk:write("r;".. qt(when) ..";".. qt(eddieName) ..";".. qt(status) ..";".. qt(stderr) ..";".. qt(stdout) .."\n") + else + --log:write("[DEBUG] ".. when .." ".. eddieName .." Another eddie\n") + goto assignPrevThenNextEntry + end + ::assignPrevThenNextEntry:: + --[[]] prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout + = when , eddieName , status , stderr , stdout + ::nextEntry:: + end + snk:write("t;status;OK\n") +end + + function run( app ) + if app.exportLatestStatus then + exportLatestStatus(app) + return + end loadEddies(app) assert(app.eddies) removeCompletedEddies(app) @@ -246,6 +366,7 @@ function main() sshUser = false, statePath = false, stateDb = false, + exportLatestStatus = false, eddies = false, } if parseArgs(app) ~= 0 then os.exit(1) end -- cgit v1.1 From 936a050d8a1fc0bc2cb3153f4b14e1323c56cd53 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 8 Feb 2024 11:13:56 +0100 Subject: Fook cmd seems to work this way --- src/main/lua/paisa-fleet/FindFullDisks.lua | 235 +++++++++++++++++++++++++++++ 1 file changed, 235 insertions(+) create mode 100644 src/main/lua/paisa-fleet/FindFullDisks.lua diff --git a/src/main/lua/paisa-fleet/FindFullDisks.lua b/src/main/lua/paisa-fleet/FindFullDisks.lua new file mode 100644 index 0000000..d85f74e --- /dev/null +++ b/src/main/lua/paisa-fleet/FindFullDisks.lua @@ -0,0 +1,235 @@ + +local SL = require("scriptlee") +local newHttpClient = SL.newHttpClient +local newShellcmd = SL.newShellcmd +--local newSqlite = SL.newSqlite +local objectSeal = SL.objectSeal +local parseJSON = SL.parseJSON +--local sleep = SL.posix.sleep +local startOrExecute = SL.reactor.startOrExecute +SL = nil +local log = io.stdout + + +function printHelp() + io.write("\n" + .." WARN: This is experimental.\n" + .." \n" + .." Options:\n" + .." --backendHost (eg \"localhost\")\n" + .." --backendPort (eg 80)\n" + .." --sshPort (eg 22)\n" + .." --sshUser (eg \"eddieuser\")\n" + .." --state (eg \"path/to/state\")\n" + .." 
\n") +end + + +function parseArgs( app ) + app.backendPort = 80 + --app.statePath = ":memory:" + local iA = 0 + ::nextArg:: + iA = iA + 1 + local arg = _ENV.arg[iA] + if not arg then + goto verifyResult + elseif arg == "--help" then + app.isHelp = true return 0 + elseif arg == "--backendHost" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --backendHost needs value\n")return end + app.backendHost = arg + elseif arg == "--backendPort" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --backendPort needs value\n")return end + app.backendHost = arg + elseif arg == "--sshPort" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --sshPort needs value\n")return end + app.sshPort = arg + elseif arg == "--sshUser" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --sshUser needs value\n")return end + app.sshUser = arg + --elseif arg == "--state" then + -- iA = iA + 1; arg = _ENV.arg[iA] + -- if not arg then log:write("EINVAL: --state needs value\n")return end + -- app.statePath = arg + else + log:write("EINVAL: ".. arg .."\n")return + end + goto nextArg + ::verifyResult:: + if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end + return 0 +end + + +function doWhateverWithDevices( app ) + for k, dev in pairs(app.devices) do + if dev.eddieName ~= "eddie00003" or dev.type == "LUNKWILL" then + log:write("[DEBUG] Skip '".. dev.eddieName .."'->'".. dev.hostname .."'\n") + goto nextDevice + end + log:write("\n") + log:write(" hostname "..tostring(dev.hostname).."\n") + log:write("eddieName "..tostring(dev.eddieName).."\n") + log:write(" type "..tostring(dev.type).."\n") + log:write(" lastSeen "..tostring(dev.lastSeen).."\n") + assert(dev.type == "FOOK") + local cmd = objectSeal{ + base = false, + cmdLine = false, + } + local fookCmd = "echo fook-says-hi && hostname" + local eddieCmd = "ssh" + .." -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null -p7022 isa@fook" + .." \\\n --" + .." sh -c 'true && ".. fookCmd:gsub("'", "'\"'\"'") .."'" + local localCmd = assert(os.getenv("SSH_EXE"), "environ.SSH_EXE missing") + .." -oRemoteCommand=none -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null -p7022 isa@eddie00003" + .." \\\n --" + .." sh -c 'true && ".. eddieCmd:gsub("'", "'\"'\"'") .."'" + do + -- TODO get rid of use-tmp-file-as-script workaround + local tmpPath = assert(os.getenv("TMP")) .."/b30589uj30oahujotehuj.sh" + log:write("[DEBUG] tmpPath '".. tmpPath .."'\n") + local tmpFile = assert(io.open(tmpPath, "wb"), "Failed to open '".. tmpPath .."'") + tmpFile:write("#!/bin/sh\n".. localCmd .."\n") + tmpFile:close() + error("TODO_238hu38h") + end + cmd.cmdLine = localCmd + local okMarker = "OK_".. math.random(1000000,9999999) .."q958zhug3ojhat" + --cmd.cmdLine = cmd.cmdLine .." -- true" + -- .." && echo hostname=$(hostname|sed s_.pnet.ch__)" + -- .." && echo stage=$PAISA_ENV" + -- .." && whoami" + -- .." && echo ".. assert(okMarker) .."" + log:write("[DEBUG] ".. cmd.cmdLine .."\n") + cmd.base = newShellcmd{ + cls = cmd, + cmdLine = cmd.cmdLine, + onStdout = function( buf, cmd ) io.write(buf or"")end, + --onStderr = function( buf, cmd )end, + } + cmd.base:start() + cmd.base:closeSnk() + local exit, signal = cmd.base:join(7) + if exit ~= 0 or signal ~= nil then + error(tostring(exit).." 
"..tostring(signal)) + end + error("TODO_938thu") + ::nextDevice:: + end +end + + +function sortDevicesMostRecentlySeenFirst( app ) + table.sort(app.devices, function(a, b) return a.lastSeen > b.lastSeen end) +end + + +function fetchDevices( app ) + local req = objectSeal{ + base = false, + method = "GET", + uri = "/houston/vehicle/inventory/v1/info/devices", + rspCode = false, + rspBody = false, + isDone = false, + } + req.base = app.http:request{ + cls = req, connectTimeoutMs = 3000, + host = app.backendHost, port = app.backendPort, + method = req.method, url = req.uri, + onRspHdr = function( rspHdr, req ) + req.rspCode = rspHdr.status + if rspHdr.status ~= 200 then + log:write(".-----------------------------------------\n") + log:write("| ".. req.method .." ".. req.uri .."\n") + log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n") + log:write("+-----------------------------------------\n") + log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n") + for i,h in ipairs(rspHdr.headers) do + log:write("| ".. h[1] ..": ".. h[2] .."\n") + end + log:write("| \n") + end + end, + onRspChunk = function( buf, req ) + if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end + if buf then + if not req.rspBody then req.rspBody = buf + else req.rspBody = req.rspBody .. buf end + end + end, + onRspEnd = function( req ) + if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end + req.isDone = true + end, + } + req.base:closeSnk() + assert(req.isDone) + if req.rspCode ~= 200 then log:write("ERROR: Couldn't fetch devices\n")return end + assert(not app.devices) + app.devices = {} + --io.write(req.rspBody)io.write("\n") + for iD, device in pairs(parseJSON(req.rspBody).devices) do + print("Wa", iD, device) + --for k,v in pairs(device)do print("W",k,v)end + -- TODO how to access 'device.type'? + local hostname , eddieName , lastSeen + = device.hostname:value(), device.eddieName:value(), device.lastSeen:value() + local typ + if false then + elseif hostname:find("^eddie%d%d%d%d%d$") then + typ = "EDDIE" + elseif hostname:find("^fook%-[a-z0-9]+$") then + typ = "FOOK" + elseif hostname:find("^lunkwill%-[a-z0-9]+$") then + typ = "LUNKWILL" + elseif hostname:find("^fook$") then + log:write("[WARN ] WTF?!? 
'"..hostname.."'\n") + typ = false + else error("TODO_359zh8i3wjho "..hostname) end + table.insert(app.devices, objectSeal{ + hostname = hostname, + eddieName = eddieName, + type = typ, + lastSeen = lastSeen, + }) + end +end + + +function run( app ) + fetchDevices(app) + sortDevicesMostRecentlySeenFirst(app) + doWhateverWithDevices(app) + error("TODO_a8uaehjgae9o8it") +end + + +function main() + local app = objectSeal{ + isHelp = false, + backendHost = false, + backendPort = false, + sshPort = false, + sshUser = false, +-- statePath = false, +-- stateDb = false, + http = newHttpClient{}, + devices = false, + } + if parseArgs(app) ~= 0 then os.exit(1) end + if app.isHelp then printHelp() return end + run(app) +end + + +startOrExecute(main) + + -- cgit v1.1 From 81cf7761c3ef30d365999b667bb919084e539acc Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 8 Feb 2024 18:55:22 +0100 Subject: Fine-tune & cleanup paisa-fleet/*.lua --- src/main/lua/paisa-fleet/FindFullDisks.lua | 200 ++++++++++++++++++------- src/main/lua/paisa-fleet/RmArtifactBaseDir.lua | 2 + 2 files changed, 147 insertions(+), 55 deletions(-) diff --git a/src/main/lua/paisa-fleet/FindFullDisks.lua b/src/main/lua/paisa-fleet/FindFullDisks.lua index d85f74e..7bddbb0 100644 --- a/src/main/lua/paisa-fleet/FindFullDisks.lua +++ b/src/main/lua/paisa-fleet/FindFullDisks.lua @@ -2,12 +2,12 @@ local SL = require("scriptlee") local newHttpClient = SL.newHttpClient local newShellcmd = SL.newShellcmd ---local newSqlite = SL.newSqlite +local newSqlite = SL.newSqlite local objectSeal = SL.objectSeal local parseJSON = SL.parseJSON ---local sleep = SL.posix.sleep local startOrExecute = SL.reactor.startOrExecute SL = nil + local log = io.stdout @@ -27,7 +27,9 @@ end function parseArgs( app ) app.backendPort = 80 - --app.statePath = ":memory:" + app.sshPort = 22 + app.sshUser = os.getenv("USERNAME") or false + app.statePath = ":memory:" local iA = 0 ::nextArg:: iA = iA + 1 @@ -52,75 +54,130 @@ function parseArgs( app ) iA = iA + 1; arg = _ENV.arg[iA] if not arg then log:write("EINVAL: --sshUser needs value\n")return end app.sshUser = arg - --elseif arg == "--state" then - -- iA = iA + 1; arg = _ENV.arg[iA] - -- if not arg then log:write("EINVAL: --state needs value\n")return end - -- app.statePath = arg + elseif arg == "--state" then + iA = iA + 1; arg = _ENV.arg[iA] + if not arg then log:write("EINVAL: --state needs value\n")return end + app.statePath = arg else log:write("EINVAL: ".. arg .."\n")return end goto nextArg ::verifyResult:: if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end + if not app.sshUser then log:write("EINVAL: --sshUser missing")return end return 0 end +function getStateDb(app) + if not app.stateDb then + local db = newSqlite{ database = assert(app.statePath) } + -- TODO normalize scheme + db:prepare("CREATE TABLE IF NOT EXISTS DeviceDfLog(\n" + .." id INTEGER PRIMARY KEY,\n" + .." \"when\" TEXT NOT NULL,\n" -- "https://xkcd.com/1179" + .." hostname TEXT NOT NULL,\n" + .." eddieName TEXT NOT NULL,\n" + .." rootPartitionUsedPercent INT,\n" + .." varLibDockerUsedPercent INT,\n" + .." varLogUsedPercent INT,\n" + .." dataUsedPercent INT,\n" + .." stderr TEXT NOT NULL,\n" + .." 
stdout TEXT NOT NULL)\n" + ..";"):execute() + app.stateDb = db + end + return app.stateDb +end + + +function storeDiskFullResult( app, hostname, eddieName, stderrBuf, stdoutBuf ) + assert(app and hostname and eddieName and stderrBuf and stdoutBuf); + local rootPartitionUsedPercent = stdoutBuf:match("\n/[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /\n") + local varLibDockerUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/lib/docker\n") + local dataUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /data\n") + local varLogUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/log\n") + local stmt = getStateDb(app):prepare("INSERT INTO DeviceDfLog(" + .." \"when\", hostname, eddieName, stderr, stdout," + .." rootPartitionUsedPercent, dataUsedPercent, varLibDockerUsedPercent, varLogUsedPercent, dataUsedPercent" + ..")VALUES(" + .." $when, $hostname, $eddieName, $stderr, $stdout," + .." $rootPartitionUsedPercent, $dataUsedPercent, $varLibDockerUsedPercent, $varLogUsedPercent, $dataUsedPercent);") + stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%SZ")) + stmt:bind("$hostname", hostname) + stmt:bind("$eddieName", eddieName) + stmt:bind("$stderr", stderrBuf) + stmt:bind("$stdout", stdoutBuf) + stmt:bind("$rootPartitionUsedPercent", rootPartitionUsedPercent) + stmt:bind("$varLibDockerUsedPercent", varLibDockerUsedPercent) + stmt:bind("$varLogUsedPercent", varLogUsedPercent) + stmt:bind("$dataUsedPercent", dataUsedPercent) + stmt:execute() +end + + function doWhateverWithDevices( app ) for k, dev in pairs(app.devices) do - if dev.eddieName ~= "eddie00003" or dev.type == "LUNKWILL" then - log:write("[DEBUG] Skip '".. dev.eddieName .."'->'".. dev.hostname .."'\n") - goto nextDevice - end log:write("\n") - log:write(" hostname "..tostring(dev.hostname).."\n") - log:write("eddieName "..tostring(dev.eddieName).."\n") - log:write(" type "..tostring(dev.type).."\n") - log:write(" lastSeen "..tostring(dev.lastSeen).."\n") - assert(dev.type == "FOOK") - local cmd = objectSeal{ - base = false, - cmdLine = false, - } - local fookCmd = "echo fook-says-hi && hostname" - local eddieCmd = "ssh" - .." -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null -p7022 isa@fook" + log:write("[INFO ] About to inspect '".. dev.hostname .."' (@ ".. dev.eddieName ..")\n") + local fookCmd = "true" + .." && HOSTNAME=$(hostname|sed 's_.isa.localdomain__')" + .." && STAGE=$PAISA_ENV" + .." && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\"" + -- on some machine, df failed with "Stale file handle" But I want to continue + -- with next device regardless of such errors. + .." && df || true" + local eddieCmd = "true" + .." && HOSTNAME=$(hostname|sed 's_.pnet.ch__')" + .." && STAGE=$PAISA_ENV" + .." && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\"" + .." && if test \"${HOSTNAME}\" != \"".. dev.eddieName .."\"; then true" + .." && echo wrong host. Want ".. dev.eddieName .." found $HOSTNAME && false" + .." ;fi" + .." && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null" + .." -p".. app.sshPort .." ".. app.sshUser .."@".. ((dev.type == "FOOK")and"fook"or dev.hostname) .." \\\n --" .." sh -c 'true && ".. fookCmd:gsub("'", "'\"'\"'") .."'" local localCmd = assert(os.getenv("SSH_EXE"), "environ.SSH_EXE missing") - .." -oRemoteCommand=none -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null -p7022 isa@eddie00003" + .." -oRemoteCommand=none -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null" + .." -p".. app.sshPort .." ".. app.sshUser .."@".. dev.eddieName .."" .." 
\\\n --" .." sh -c 'true && ".. eddieCmd:gsub("'", "'\"'\"'") .."'" - do - -- TODO get rid of use-tmp-file-as-script workaround - local tmpPath = assert(os.getenv("TMP")) .."/b30589uj30oahujotehuj.sh" - log:write("[DEBUG] tmpPath '".. tmpPath .."'\n") - local tmpFile = assert(io.open(tmpPath, "wb"), "Failed to open '".. tmpPath .."'") - tmpFile:write("#!/bin/sh\n".. localCmd .."\n") - tmpFile:close() - error("TODO_238hu38h") - end - cmd.cmdLine = localCmd - local okMarker = "OK_".. math.random(1000000,9999999) .."q958zhug3ojhat" - --cmd.cmdLine = cmd.cmdLine .." -- true" - -- .." && echo hostname=$(hostname|sed s_.pnet.ch__)" - -- .." && echo stage=$PAISA_ENV" - -- .." && whoami" - -- .." && echo ".. assert(okMarker) .."" - log:write("[DEBUG] ".. cmd.cmdLine .."\n") + -- TODO get rid of this ugly use-tmp-file-as-script workaround + local tmpPath = assert(os.getenv("TMP"), "environ.TMP missing"):gsub("\\", "/") .."/b30589uj30oahujotehuj.sh" + --log:write("[DEBUG] tmpPath '".. tmpPath .."'\n") + local tmpFile = assert(io.open(tmpPath, "wb"), "Failed to open '".. tmpPath .."'") + tmpFile:write("#!/bin/sh\n".. localCmd .."\n") + tmpFile:close() + --log:write("[DEBUG] tmpPath ".. tmpPath .."\n") + -- EndOf kludge + local cmd = objectSeal{ + base = false, + stdoutBuf = false, + stderrBuf = false, + } cmd.base = newShellcmd{ cls = cmd, - cmdLine = cmd.cmdLine, - onStdout = function( buf, cmd ) io.write(buf or"")end, - --onStderr = function( buf, cmd )end, + cmdLine = "sh \"".. tmpPath .."\"", + onStdout = function( buf, cmd ) + if buf then cmd.stdoutBuf = cmd.stdoutBuf and cmd.stdoutBuf .. buf or buf end + end, + onStderr = function( buf, cmd ) + if buf then cmd.stderrBuf = cmd.stderrBuf and cmd.stderrBuf .. buf or buf end + end, } cmd.base:start() cmd.base:closeSnk() - local exit, signal = cmd.base:join(7) + local exit, signal = cmd.base:join(17) + if exit == 255 and signal == nil then + log:write("[DEBUG] fd2: ".. cmd.stderrBuf:gsub("\n", "\n[DEBUG] fd2: "):gsub("\n[DEBUG] fd2: $", "") .."\n") + goto nextDevice + end + log:write("[DEBUG] fd1: ".. cmd.stdoutBuf:gsub("\n", "\n[DEBUG] fd1: "):gsub("\n[DEBUG] fd1: $", "") .."\n") + storeDiskFullResult(app, dev.hostname, dev.eddieName, cmd.stderrBuf, cmd.stdoutBuf) if exit ~= 0 or signal ~= nil then - error(tostring(exit).." "..tostring(signal)) + error("exit=".. tostring(exit)..", signal="..tostring(signal)) end - error("TODO_938thu") ::nextDevice:: end end @@ -131,6 +188,39 @@ function sortDevicesMostRecentlySeenFirst( app ) end +-- Don't want to visit just seen devices over and over again. So drop devices +-- we've recently seen from our devices-to-visit list. +function dropDevicesRecentlySeen( app ) + -- Collect recently seen devices. + local devicesToRemove = {} + local st = getStateDb(app):prepare("SELECT hostname FROM DeviceDfLog WHERE \"when\" > $tresholdDate") + st:bind("$tresholdDate", os.date("!%Y-%m-%dT%H:%M:%SZ", os.time()-42*3600)) + local rs = st:execute() + while rs:next() do + local hostname = rs:value(1) + devicesToRemove[hostname] = true + end + -- Remove selected devices + local numKeep, numDrop = 0, 0 + local iD = 0 while true do iD = iD + 1 + local device = app.devices[iD] + if not device then break end + if devicesToRemove[device.hostname] then + --log:write("[DEBUG] Drop '".. device.hostname .."' (".. device.eddieName ..")\n") + numDrop = numDrop + 1 + app.devices[iD] = app.devices[#app.devices] + app.devices[#app.devices] = nil + iD = iD - 1 + else + --log:write("[DEBUG] Keep '".. device.hostname .."' (".. 
device.eddieName ..")\n") + numKeep = numKeep + 1 + end + end + log:write("[INFO ] Of "..(numKeep+numDrop).." devices from state visit ".. numKeep + .." and skip ".. numDrop .." (bcause seen recently)\n") +end + + function fetchDevices( app ) local req = objectSeal{ base = false, @@ -152,9 +242,7 @@ function fetchDevices( app ) log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n") log:write("+-----------------------------------------\n") log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n") - for i,h in ipairs(rspHdr.headers) do - log:write("| ".. h[1] ..": ".. h[2] .."\n") - end + for i,h in ipairs(rspHdr.headers) do log:write("| ".. h[1] ..": ".. h[2] .."\n") end log:write("| \n") end end, @@ -175,9 +263,10 @@ function fetchDevices( app ) if req.rspCode ~= 200 then log:write("ERROR: Couldn't fetch devices\n")return end assert(not app.devices) app.devices = {} + log:write("[DEBUG] rspBody.len is ".. req.rspBody:len() .."\n") --io.write(req.rspBody)io.write("\n") for iD, device in pairs(parseJSON(req.rspBody).devices) do - print("Wa", iD, device) + --print("Wa", iD, device) --for k,v in pairs(device)do print("W",k,v)end -- TODO how to access 'device.type'? local hostname , eddieName , lastSeen @@ -201,14 +290,15 @@ function fetchDevices( app ) lastSeen = lastSeen, }) end + log:write("[INFO ] Fetched ".. #app.devices .." devices.\n") end function run( app ) fetchDevices(app) - sortDevicesMostRecentlySeenFirst(app) + dropDevicesRecentlySeen(app) + --sortDevicesMostRecentlySeenFirst(app) doWhateverWithDevices(app) - error("TODO_a8uaehjgae9o8it") end @@ -219,8 +309,8 @@ function main() backendPort = false, sshPort = false, sshUser = false, --- statePath = false, --- stateDb = false, + statePath = false, + stateDb = false, http = newHttpClient{}, devices = false, } diff --git a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua index 4eae033..949d1fe 100644 --- a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua +++ b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua @@ -65,6 +65,8 @@ function parseArgs( app ) app.statePath = arg elseif arg == "--exportLatestStatus" then app.exportLatestStatus = true + else + log:write("EINVAL: ".. arg .."\n")return end goto nextArg ::verifyResult:: -- cgit v1.1 From 75af592ef42ff41f0d2b7b674cf4753841c98fc5 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 8 Feb 2024 19:08:01 +0100 Subject: Cleanup in FindFullDisks.lua --- src/main/lua/paisa-fleet/FindFullDisks.lua | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/src/main/lua/paisa-fleet/FindFullDisks.lua b/src/main/lua/paisa-fleet/FindFullDisks.lua index 7bddbb0..9963838 100644 --- a/src/main/lua/paisa-fleet/FindFullDisks.lua +++ b/src/main/lua/paisa-fleet/FindFullDisks.lua @@ -118,8 +118,7 @@ end function doWhateverWithDevices( app ) for k, dev in pairs(app.devices) do - log:write("\n") - log:write("[INFO ] About to inspect '".. dev.hostname .."' (@ ".. dev.eddieName ..")\n") + log:write("[INFO ] Inspecting '".. dev.hostname .."' (@ ".. dev.eddieName ..") ...\n") local fookCmd = "true" .." && HOSTNAME=$(hostname|sed 's_.isa.localdomain__')" .." && STAGE=$PAISA_ENV" @@ -153,27 +152,25 @@ function doWhateverWithDevices( app ) -- EndOf kludge local cmd = objectSeal{ base = false, - stdoutBuf = false, - stderrBuf = false, + stdoutBuf = {}, + stderrBuf = {}, } cmd.base = newShellcmd{ cls = cmd, cmdLine = "sh \"".. 
tmpPath .."\"", - onStdout = function( buf, cmd ) - if buf then cmd.stdoutBuf = cmd.stdoutBuf and cmd.stdoutBuf .. buf or buf end - end, - onStderr = function( buf, cmd ) - if buf then cmd.stderrBuf = cmd.stderrBuf and cmd.stderrBuf .. buf or buf end - end, + onStdout = function( buf, cmd ) table.insert(cmd.stdoutBuf, buf or"") end, + onStderr = function( buf, cmd ) table.insert(cmd.stderrBuf, buf or"") end, } cmd.base:start() cmd.base:closeSnk() local exit, signal = cmd.base:join(17) + cmd.stderrBuf = table.concat(cmd.stderrBuf) + cmd.stdoutBuf = table.concat(cmd.stdoutBuf) if exit == 255 and signal == nil then - log:write("[DEBUG] fd2: ".. cmd.stderrBuf:gsub("\n", "\n[DEBUG] fd2: "):gsub("\n[DEBUG] fd2: $", "") .."\n") + log:write("[DEBUG] fd2: ".. cmd.stderrBuf:gsub("\n", "\n[DEBUG] fd2: "):gsub("\n%[DEBUG%] fd2: $", "") .."\n") goto nextDevice end - log:write("[DEBUG] fd1: ".. cmd.stdoutBuf:gsub("\n", "\n[DEBUG] fd1: "):gsub("\n[DEBUG] fd1: $", "") .."\n") + log:write("[DEBUG] fd1: ".. cmd.stdoutBuf:gsub("\n", "\n[DEBUG] fd1: "):gsub("\n%[DEBUG%] fd1: $", "") .."\n") storeDiskFullResult(app, dev.hostname, dev.eddieName, cmd.stderrBuf, cmd.stdoutBuf) if exit ~= 0 or signal ~= nil then error("exit=".. tostring(exit)..", signal="..tostring(signal)) -- cgit v1.1 From b1eb619500b0c6c80d590af2f02edd350bb178ad Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 11 Feb 2024 16:58:43 +0100 Subject: Doc qemu mount qcow2 on host for inspection --- doc/note/qemu/qemu.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index b267698..f7c9498 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -70,6 +70,17 @@ qemu-system-x86_64 \ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ +## Inspect qcow2 by host mounting it + + $SUDO modprobe nbd + $SUDO qemu-nbd -c /dev/nbd0 /path/to/my.qcow2 + echo 'p' | $SUDO fdisk /dev/nbd0 + $SUDO mount -o ro /dev/nbd0p2 /mnt/q + $SUDO umount /mnt/q `# cleanup` + qemu-nbd -d /dev/nbd0 `# cleanup` + $SUDO rmmod nbd `# cleanup` + + ### Example manual adapter setup (inside VM) for socket mcast network: true \ && ADDR=192.168.42.101/24 \ @@ -271,4 +282,5 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever. - [qemu monitor via stdio](https://unix.stackexchange.com/a/57835/292722) - [qemu raspberry pi TODO](https://blog.agchapman.com/using-qemu-to-emulate-a-raspberry-pi/) - [connect VM networks](https://qemu.weilnetz.de/doc/6.0/system/invocation.html#sec-005finvocation) +- [inspect qcow2 mount host browse](https://www.jamescoyle.net/how-to/1818-access-a-qcow2-virtual-disk-image-from-the-host) -- cgit v1.1 From 9abe89ec4f0db26cea3c72508b5cb1401086e24d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 13 Feb 2024 12:27:53 +0100 Subject: (ff-plugin) Try fix some glitches by keeping our own state. 
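Instead of chasing individual DOM mutation events (which raced against the
user's own clicks), keep the wanted expanded/collapsed state in plain flags,
toggle those flags on mousedown, and let a 42ms poll push the DOM back toward
the wanted state. Minimal sketch of that reconcile loop (names are
illustrative, not the plugin's real ones):

    var want = { expanded: false };
    (function reconcile(){
        var btn = document.querySelector("button[aria-expanded]");
        if( btn && btn.getAttribute("aria-expanded") === "true" && !want.expanded ){
            btn.click(); /* collapse it again */
        }
        setTimeout(reconcile, 42);
    }());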
--- src/main/firefox/gaga-plugin/main.js | 149 ++++++++++++++++------------------- 1 file changed, 69 insertions(+), 80 deletions(-) diff --git a/src/main/firefox/gaga-plugin/main.js b/src/main/firefox/gaga-plugin/main.js index 2a5bbae..4447719 100644 --- a/src/main/firefox/gaga-plugin/main.js +++ b/src/main/firefox/gaga-plugin/main.js @@ -1,15 +1,10 @@ /* - * For how to install see: - * - * "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/tree/doc/note/firefox/firefox.txt" + * [How to install](UnspecifiedGarbage/doc/note/firefox/firefox.txt) */ ;(function(){ try{ var NDEBUG = false; var STATUS_INIT = 1; - var STATUS_RUNNING = 2; - var STATUS_DONE = 3; - var STATUS_OBSOLETE = 4; var NOOP = function(){}; var LOGERR = console.error.bind(console); var N = null; @@ -19,11 +14,10 @@ function main(){ var app = Object.seal({ ui: {}, - status: Object.seal({ - checklistBtn: STATUS_INIT, - developmentBtn: STATUS_INIT, - }), lastClickEpochMs: 0, + wantChecklistExpanded: false, + wantDevelopmentExpanaded: false, + wantBigTemplateExpanded: false, }); if( NDEBUG ){ setTimeout = window.setTimeout; @@ -32,14 +26,16 @@ }else{ /* fix broken tooling */ setTimeout = setTimeoutWithCatch.bind(0, app); logErrors = logErrorsImpl.bind(N, app); - LOGDBG = console.debug.bind(console); + LOGDBG = console.debug.bind(console, "[gaga-plugin]"); } document.addEventListener("DOMContentLoaded", logErrors.bind(N, onDOMContentLoaded, app)); + scheduleNextStateCheck(app); + LOGDBG("gaga-plugin initialized"); } function onDOMContentLoaded( app ){ - cleanupClutter(app); + LOGDBG("onDOMContentLoaded()"); attachDomObserver(app); } @@ -50,83 +46,58 @@ } - function onDomHasChangedSomehow( app, changes, mutationObserver ){ - var nowEpochMs = Date.now(); - if( (app.lastClickEpochMs + 2000) > nowEpochMs ){ - LOGDBG("ignore, likely triggered by user."); - return; } - var needsReEval = false; - for( var change of changes ){ - if( change.target.nodeName != "BUTTON" ) continue; - var isAriaExpanded = (change.attributeName == "aria-expanded"); - var isChildAdded = (change.addedNodes.length > 0); - var isChildRemoved = (change.removedNodes.length > 0); - var isChildAddedOrRemoved = isChildAdded || isChildRemoved; - if( !isAriaExpanded && !isChildAddedOrRemoved ) continue; - if( isAriaExpanded ){ - LOGDBG("Suspicious, isExpanded: ", change.target); - needsReEval = true; break; - } - if( !isChildAddedOrRemoved ) continue; - var isBloatyChecklistBtnStillThere = document.body.contains(getBloatyChecklistBtn(app)); - if( !isBloatyChecklistBtnStillThere ){ - LOGDBG("Suspicious, btn lost"); - needsReEval = true; break; - } - var isBloatyDevelopmentBtnStillThere = document.body.contains(getBloatyDevelopmentBtn(app)); - if( !isBloatyDevelopmentBtnStillThere ){ - LOGDBG("Suspicious, btn lost"); - needsReEval = true; break; - } - } - if( needsReEval ){ - LOGDBG("Change detected! 
Eval again"); - app.ui.bloatyChecklistBtn = null; - app.ui.bloatyDevelopmentBtn = null; - setTimeout(cleanupClutter, 42, app); + function scheduleNextStateCheck( app ){ + //LOGDBG("scheduleNextStateCheck()"); + if( app.stateCheckTimer ){ + LOGDBG("Why is stateCheckTimer not zero?", app.stateCheckTimer); } + app.stateCheckTimer = setTimeout(function(){ + app.stateCheckTimer = null; + scheduleNextStateCheck(app); + performStateCheck(app); + }, 42); } - function cleanupClutter( app ){ - if( app.bloatyChecklistDone != STATUS_RUNNING ){ - app.bloatyChecklistDone = STATUS_OBSOLETE - setTimeout(hideBloatyButton, 0, app, "checklistBtn"); - } - if( app.bloatyDevelopmentDone != STATUS_RUNNING ){ - app.bloatyDevelopmentDone = STATUS_OBSOLETE; - setTimeout(hideBloatyButton, 0, app, "developmentBtn"); - } - if( app.bloatyDevelopmentDone != STATUS_RUNNING ){ - app.bloatyDevelopmentDone = STATUS_OBSOLETE; - setTimeout(hideBloatyButton, 0, app, "bigTemplateBtn"); + function performStateCheck( app ){ + var buttons = [ "checklistBtn", "developmentBtn", "bigTemplateBtn" ]; + var wantKey = [ "wantChecklistExpanded", "wantDevelopmentExpanaded", "wantBigTemplateExpanded" ]; + for( var i = 0 ; i < buttons.length ; ++i ){ + var btnKey = buttons[i]; + var btnElem = getBloatyButton(app, btnKey); + if( !btnElem ) continue; + var isExpanded = isAriaBtnExpanded(app, btnElem) + var wantExpanded = app[wantKey[i]]; + //LOGDBG(btnKey +" expanded is", isExpanded); + if( isExpanded && !wantExpanded ){ + collapseAriaBtn(app, btnElem); + } } } - function setLastClickTimeToNow( app ){ app.lastClickEpochMs = Date.now(); } + function onDomHasChangedSomehow( app, changes, mutationObserver ){ + var nowEpochMs = Date.now(); + LOGDBG("DOM Change detected!"); + /*refresh dom refs so check will work on correct elems*/ + Object.keys(app.ui).forEach(function( key ){ + app.ui[key] = null; + }); + } - function hideBloatyButton( app, btnKey ){ - if( app.status[btnKey] == STATUS_DONE ){ - LOGDBG(btnKey +" now hidden"); - return; } - app.status[btnKey] == STATUS_RUNNING; - var btn = getBloatyButton(app, btnKey); - do{ - if( !btn ){ LOGDBG(btnKey +" not found. 
DOM maybe not yet ready?"); break; } - var isExpanded = isAriaBtnExpanded(app, btn); - if( isExpanded === true ){ - LOGDBG(btnKey +".click()"); - btn.click(); - }else if( isExpanded === false ){ - app.status[btnKey] = STATUS_DONE; - }else{ - throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded); - } - }while(0); - /* try later */ - setTimeout(hideBloatyButton, 16, app, btnKey); + function onBloatyChecklistBtnMousedown( app ){ + app.wantChecklistExpanded = !app.wantChecklistExpanded; + } + + + function onBloatyDevelopmentBtnMousedown( app ){ + app.wantDevelopmentExpanaded = !app.wantDevelopmentExpanaded; + } + + + function onBloatyBigTemplateBtnMousedown( app ){ + app.wantBigTemplateExpanded = !app.wantBigTemplateExpanded; } @@ -135,19 +106,22 @@ }else if( btnKey == "checklistBtn" ){ var selector = "button[aria-label=Checklists]"; var uiKey = "bloatyChecklistBtn"; + var onMousedown = onBloatyChecklistBtnMousedown; }else if( btnKey == "developmentBtn" ){ var selector = "button[aria-label=Development]"; var uiKey = "bloatyDevelopmentBtn"; + var onMousedown = onBloatyDevelopmentBtnMousedown; }else if( btnKey == "bigTemplateBtn" ){ var selector = "button[aria-label=BigTemplate]"; var uiKey = "bloatyBigTemplateBtn"; + var onMousedown = onBloatyBigTemplateBtnMousedown; }else{ throw Error(btnKey); } if( !app.ui[uiKey] ){ var btn = fetchUiRefOrNull(app, document, selector); if( btn ){ - btn.addEventListener("mousedown", logErrors.bind(N, setLastClickTimeToNow, app)); + btn.addEventListener("mousedown", logErrors.bind(N, onMousedown, app)); app.ui[uiKey] = btn; } } @@ -155,6 +129,21 @@ } + function collapseAriaBtn( app, btnElem ){ + do{ + var isExpanded = isAriaBtnExpanded(app, btnElem); + if( isExpanded === true ){ + LOGDBG("click()"); + btnElem.click(); + }else if( isExpanded === false ){ + break; + }else{ + throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded); + } + }while(0); + } + + function isAriaBtnExpanded( app, btnElem ){ var value = btnElem.getAttribute("aria-expanded"); if( value === "true" ){ -- cgit v1.1 From bcffb3a860552cd52d84805af0433b48949dec53 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Wed, 14 Feb 2024 16:58:12 +0100 Subject: Fix minimalistic nginx config --- doc/note/nginx/nginx.txt | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/doc/note/nginx/nginx.txt b/doc/note/nginx/nginx.txt index d5ce9db..0550e7f 100644 --- a/doc/note/nginx/nginx.txt +++ b/doc/note/nginx/nginx.txt @@ -3,34 +3,44 @@ [looks promising](https://stackoverflow.com/a/73297125/4415884) + # Basic setup: + # - Maybe change "access_log" to "/var/log/nginx/access.log". + # - For CLI use: Change all "/tmp/nginx" to "." (single dot, aka workdir or + # other user writable dir). + # Public expose setup: + # - Adapt "listen" as commented. + # - set "server_name" to meaningful value. + # #daemon off; # run in foreground (eg from cli) events {} - pid nginx.pid; + pid /var/run/nginx.pid; http { access_log /dev/stdout; # Directories nginx needs configured to start up. 
- client_body_temp_path .; - proxy_temp_path .; - fastcgi_temp_path .; - uwsgi_temp_path .; - scgi_temp_path .; + client_body_temp_path /tmp/nginx; + proxy_temp_path /tmp/nginx; + fastcgi_temp_path /tmp/nginx; + uwsgi_temp_path /tmp/nginx; + scgi_temp_path /tmp/nginx; server { - # For public access use "80" and "[::]:80" + # public access: "80" and "[::]:80" + # local access: "127.0.0.1:80" and "[::1]:80" listen 127.0.0.1:80; listen [::1]:80; server_name localhost; - root /srv/www + root /srv/www; location /foo { + #autoindex on; # directory listing try_files $uri $uri/ =404; } location /example { - autoindex on; # enable directory listing return 200 "Example says hi"; } } } + [tutorial](https://www.javatpoint.com/nginx-minimal-configuration) -- cgit v1.1 From aaeb1e60e003016fa2e42171f22e23458a6811d2 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Wed, 14 Feb 2024 16:59:46 +0100 Subject: Small rewording in windoof setup. --- doc/note/qemu/setup-windoof.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/note/qemu/setup-windoof.txt b/doc/note/qemu/setup-windoof.txt index c301416..1bac77f 100644 --- a/doc/note/qemu/setup-windoof.txt +++ b/doc/note/qemu/setup-windoof.txt @@ -29,7 +29,7 @@ internet access once more to cleanup the disk: SDelete.exe -nobanner -z C: -Shutdown guest, then compact disc. +Shutdown win guest, then compact disc. qemu-img convert -O qcow2 input.qcow output.qcow2 -- cgit v1.1 From f93fd4d14262527708ad4af8cce7801bf9d310fc Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Wed, 14 Feb 2024 19:46:08 +0100 Subject: (qemu) note how to compact snapshot with rebase --- doc/note/qemu/qemu.txt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index f7c9498..a4db753 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -34,6 +34,14 @@ Windoof: qemu-img convert -O qcow2 input.qcow output.qcow2 +## Shrink snapshot layer + + qemu-img convert -O qcow2 snapLayer.qcow2 tmpFullClone.qcow2 + qemu-img create -f qcow2 -b tmpFullClone.qcow2 diff.qcow2 + qemu-img rebase -b base.qcow2 tmpDiff.qcow2 + mv tmpDiff.qcow2 snapLayer.qcow2 + + ## Example Params (Usage: CopyPaste, then delege what is not needed) qemu-system-x86_64 \ -accel kvm:whpx:hax:tcg -m size=2G -smp cores=$(nproc) \ -- cgit v1.1 From a55dab0397df3aafe9cf50148cdd5b8c3c9b76d7 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 16 Feb 2024 15:47:44 +0100 Subject: (links) java exception performance --- doc/note/links/links.txt | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index d187788..10bd745 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -264,13 +264,14 @@ Links (Aka argument amplifiers) - [Just one message per minute](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/375/overview?commentId=330543) - [Houston down readyness probe timeout](https://wikit.post.ch/x/koO0Vg) -## Errorhandling is not needed ... 
-- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526) - ## Common Performance +- [Java Exceptions performance is bad](https://www.baeldung.com/java-exceptions-performance) - [going fast is about doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY) - [CppCon Tuning Benchmarks clang CPUs Compilers" ](https://m.youtube.com/watch?v=nXaxk27zwlk) +## Errorhandling is not needed ... +- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526) + ## Bugs are not an issue - [Bistr gateleen Forwarder exception](https://jira.post.ch/browse/SDCISA-11147) -- cgit v1.1 From 968085a09fc02e4692fe8948c6f0b7fa4ec1c0d2 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 16 Feb 2024 15:48:23 +0100 Subject: (maven) Custom deployments --- doc/note/maven/maven.txt | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index 309fa63..10a606a 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -23,10 +23,15 @@ mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://art mvn deploy -Dcmake.generate.skip=true -Dcmake.compile.skip=true -DaltDeploymentRepository=artifactory-releases::default::https://artifactory.tools.pnet.ch/artifactory/libs-release-local -true `# Deploy custom build 20240206` \ +true \ && DEPLOPTS= \ + && `# Deploy custom gateleen build 20240206` \ && mvn clean install -pl '!gateleen-hook-js,!gateleen-playground' \ - && mvn deploy -DskipTests -pl '!gateleen-hook-js,!gateleen-playground' $DEPLOPTS \ + && mvn deploy -DskipTests -pl '!gateleen-hook-js,!gateleen-playground' ${DEPLOPTS:?} \ + && `# Deploy custom houston build 20240216` \ + && jenkinsbuild-by-upstream \ + #&& mvn clean install \ + #&& mvn deploy -DskipTests ${DEPLOPTS:?} \ && true -- cgit v1.1 From 28c3a9b3b579d7125c946f75f4a844293488599f Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 16 Feb 2024 18:34:17 +0100 Subject: (qemu) Add notes how to build kernel. Especially to host docker micro VMs through qemu. --- doc/note/qemu/qemu.txt | 1 + doc/note/qemu/setup-dockerVM.txt | 73 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 doc/note/qemu/setup-dockerVM.txt diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index a4db753..c03d4cb 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -43,6 +43,7 @@ Windoof: ## Example Params (Usage: CopyPaste, then delege what is not needed) +# TODO use '-nodefaults'. qemu-system-x86_64 \ -accel kvm:whpx:hax:tcg -m size=2G -smp cores=$(nproc) \ -monitor stdio \ diff --git a/doc/note/qemu/setup-dockerVM.txt b/doc/note/qemu/setup-dockerVM.txt new file mode 100644 index 0000000..b3e7e67 --- /dev/null +++ b/doc/note/qemu/setup-dockerVM.txt @@ -0,0 +1,73 @@ + +# +# Use qemu to host dockerimages. 
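+# Rough shape of the invocation this file works toward (untested sketch;
+# the exact flags are assumptions taken from the article linked below):
+#
+#   qemu-system-x86_64 -M microvm -enable-kvm -m 512m -no-reboot \
+#     -kernel arch/x86/boot/bzImage -append "console=ttyS0" \
+#     -nodefaults -no-user-config -serial stdio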
+# +# [Execute Docker Containers as QEMU MicroVMs](https://mergeboard.com/blog/2-qemu-microvm-docker/) +# + +true \ + && LINUX_URL=https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-6.7.4.tar.xz \ + && SUDO=sudo \ + && CACHEDIR=/var/tmp \ + && WORKDIR=/home/$USER/work \ + && LINUX_TXZ=$(basename "${LINUX_URL:?}") \ + && true \ + && $SUDO apt install -y --no-install-recommends make gcc bc flex bison libc-dev libelf-dev libssl-dev \ + && true \ + && cd "${CACHEDIR:?}" \ + && curl -L "${LINUX_URL:?}" -O \ + && cd "${WORKDIR:?}" \ + && tar xf "${CACHEDIR:?}/${LINUX_TXZ:?}" \ + && cd linux* \ + && base64 -d < .config && +H4sIAFBoz2UAA4VZS3MctxG++1eo7EtySCjSoopOlQ4YALMD7eBBALMPplKoSKZkVUmiLVKp5N/n +A2Z2F5jB0jppv+4BGv3u5k8v/vn47f2//vLb09Pvj/+4uPhy/+3j/buHf3/79e/vH75cfPj0+f7x +4t3nh48Xf9x/+f63L5/ef3v4z5eLX+8/vH/4+uHTx7/+8NMLqlUrVqHXlPQbbp3QKpDB6xefHl98 +fXh68Xj/9MPE4/ZuIwx9878DYLQTuyBvBz5woMfDBsd70VROIAMT/vS90qG7O/3sxKoLlrvghYQk +J0LjWDBWU+5cIJT685Sw+flE9MStnSfelVBgvCf72UGJsKtgQkdOPSgv1OpE6zUuH9rgOtH6N1eX +xycNkoSG9ETRkf+olJICIVoy9D5wRZqes4qyJJc0u7Hp14GurB7MCaOtw5GKbQXz3Qm2PiTG4GjH +WcY+okYwtwBby/kdtwu8G1bc902GG5jXLw9gfCMoX8BgL7V6EILbdgE2JsNwjQ0quyi9JjnnTA02 +GrTUFKQJQglvs+fj9OjDcPS+BN8KH0i/JXsXtCoFSEZqWW5IqqUhPjR2XYuRnjRJl71wPnTEMq5y +E8BZW9EXruRk9pTdzeuwuyJG0PzKiPKd54pBAaYnvtVWVm7v9lDrRjhtw2rgLtO6IZZshK0gwRmh +EP5rV6HFSKz6v7LJEd5cXt3kolsOy3geIB8UpNdchUZrH4S9dfMHScrzJxxpUlBEtGa88sD0nbOl +viCIYPPTr3u+4X0tB0kWQwuBR+3e+Py7GQkP92LDQ7M/RGvluBjWeUZjyF9jUrh8ebzS0i4erO0+ +OkDDZ/J3nK5DI7SDb1k7GC9yN1zzHacBXlPkWIuw11LcQTziaqoy3d4J+Do8W6zUm5f/vXyZ/h0z +GF8Rug+bKSIClwMcq7jDDc7A52qOJhpuFSkFhR2i69+GmHQr8CHlrfQmhT98GMmQl5yC9Txx9ISx +PB8J5XmfyBlGfr6aBB8lOYoevXfAy+8SpfKCdTJE5vNvB2lwacOz5CA1G/qcaQSie1OOGkDYgjSo +BTyV1eUxztJjye0X1wYnVjUM7kWuL69q+c7zlRW+kgp9Z7X3Zd5JBUUygDwGvMuVHWnbpswWXvgk +KdtEqxUBJx2D8x55qmHNW/EnHPIW1XZM8oyTJNif8a338MOKdRuhWumRSlzWrkwR2Glv+mFVwy3+ +t8n8a+1k1g4g5BxewJVPVdGQFX+eGiSUJVzu4T2H8ghkP2Gt1cq7LcmqwJ3ROvMHBEsMjVkvo3hh +H7rOfw9K7LIs01qZytkMckODJq4XdD8jSLGyYzIo2CEAypqgmRSiEEOYgGgEB3ElevCakOqDXX5h +4Nw+tkKuoI3lZOQgeY9zpCGCGp3rGBSjzPx3YB1dgrE4LVFLbAnKdFUFelZ2I6STYXNZA6/y8Ik6 +DEyQVcWTPUUPpdEw1uLuSGwErYbKkWGLbmCrNXueq8P/zsgg2XWRj4TZvJ6MiZrG2xoFD2v1jKBR +3WRyosBI8ZhEdqJWZEfS0CBASx0DXtrmCD5nHTBlpiju4avXod/6Qam8FpxonSS0CMNZ1QCy1XYd +K3BqoBA20pTdE/eo50UcNFYwZIwKRbXROAoJhq7rKPKMPUdynD5DRY+kipySE6PsUOMz1PSyyrPC +DrmXeHQ9KOLWoyt9lqn+vDhiFYE4YTDEqyKkgAtTiT/gc12OqCcWQw2a1bec+jNEt1foEHb7OVmR +c19I4jAJW8L4GQa8XBJzVhgmbEUctEirvOUZ/aRQ53zMixBPmi0x2schObuB0Wyu2zgdS0imwthD +YfxdEqTpl/NfvMFYoasE2hPnxGwWe5vvAtJwhvgm8tSMHFPDFqqBdV2t/8bpv5Q+iN+T7Fk9TJ4e +Ot6b3B8KOKQK8+PF47tPXy9+e3j6/fP3jz8edcU3Xpo2e/cBQWuG6Sh7NrwZhtSzdiq2Yb3WZqFf +EPMZ14nyVzwPcVaEacIRC6tykIwomnn0aPDeLEWJRPG0VNM4sOfLkSFr5jc8r7aTqGOtP7V0vsMU +wH21qGx7Uuu7hTKDD20bu65o0rIORhrUWR1oJmrR0m1gu23SkEaWR9fHZluXacoxfl9zHriZwIR0 +c3X9sph7TjDMhupG0QuxVLpw0/PnHITJzJIRMTYP6IExN9eoNk7ZGLjrLNGBmmEZn4sLoZFxOqxA +i8Aw3oTL65sbhKjOE7DRW0SE5a40ebeV1XEqeoKEkFvMHzETox8WJvetiR4nu5YIG4cYy+v0tPhB +L52Php1glmwLSQQLK66gm3rrE+nklee0O0826A3OUlHT16I+qUQycq61+2fIAr+eoe/hV2VGK+j8 +bs3Pfy38ebnXXDlEgT8zZUWWtGJxuq1HbuLATMLtGQkG16AbM0bbWrPGMPXMU0vMR4cVxgSRYceE +M8XmLu2cmBWbYv17yJOYkXW+b5hwKXN/zsDjdLsUx5MydY7LBT9glEDF3YVsi2xJ9iHf+VehXQLT +KpzQfklCBzaUU3miMN7kQ+jbhl3NMXw8raKKPUtLlPai3S+RuKqLq3AUM+TJ2dLhdtCezH6m7lWo +dVKAbceQO9oZ5RzFcWTcEqugsoq9MTHbuFDP1RJ38mFNdR7iCUNY9AxDchb5ZVUdS2pFndMiOudF +v0VcV0PCjnhvK3h/96qK6toZdxXwzvl8net88UZ0Hrk4Ch3XtPV68+P3pw83x3bCxL2P87H5QJxD +z6vcqUajx/0G3+evW3jSAWkrXIc55DwlTfWZFyGeoxM5PVjKK5/FP+uoYXeeMs7TxBL5DA+iPlaI 
+ZzjSQtTyW3iMDxvSD/xUDJNmNKb5rOEaIU4roOO3YrNAqaX+8iUTbTC0x5C4YCDpr1ALuPCTgyS2 +gXPCOeoU6otOdiJ121oXIVYq7vd5ypRxsSFaQWdLVreH10g0jRiX0t8YJvfJb6dhfF2W3+xtXPTk +O6UY2+MUl7W0e0VknMzLPORiMUdTKXG7LDpgsEXzpcUgMs5s6JDIsjRKbG/nH7WLY9axleyLNmxa +BYJa05aHBuLUmA9HOAeSMIKEVVulWzqEaZPW98f59vqXbK0OjnRotfC150lxkT89zQwVcQe1RXeK +xqZFdHAYI2VciPZ/TosyDcUdAAA= +EOF +true \ + && make olddefconfig \ + && make -j$(nproc) \ + && echo "[WARN ] TODO steps from here on are UNTESTED" && sleep 3 \ + && ls -Ahl arch/*/boot/bzImage \ + && true + + -- cgit v1.1 From d9cc1a9fd67ecf8cdf6e7a75cdd4587ec4fd2d91 Mon Sep 17 00:00:00 2001 From: andreas tux-book Date: Mon, 19 Feb 2024 22:44:38 +0100 Subject: (qemu) Begun notes about how to build qemu itself. --- doc/note/qemu/qemu-compile-itself.txt | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 doc/note/qemu/qemu-compile-itself.txt diff --git a/doc/note/qemu/qemu-compile-itself.txt b/doc/note/qemu/qemu-compile-itself.txt new file mode 100644 index 0000000..c274c13 --- /dev/null +++ b/doc/note/qemu/qemu-compile-itself.txt @@ -0,0 +1,21 @@ + +# +# Compile qemu bullshit-free for windoof. +# + +true \ + && QEMU_URL="https://gitlab.com/qemu-project/qemu/-/archive/v8.2.1/qemu-v8.2.1.tar.gz" \ + && WORKDIR=/home/${USER:?}/work \ + && CACHEDIR=/var/tmp \ + && QEMU_TGZ="${QEMU_URL_TODOCUTIT:?}" \ + && apt install -y --no-install-recommends curl \ + && curl -D- "${QEMU_URL:?}" -o "${CACHEDIR:?}/${QEMU_TGZ:?}" \ + && cd "${WORKDIR:?}" \ + && tar xf "${CACHEDIR:?}/${QEMU_TGZ:?}" \ + && mkdir build \ + && cd build \ + && ../configure \ + && make \ + && true + + -- cgit v1.1 From d2cf81052a817a9b9a61e2a97374034375a81ba6 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 19 Feb 2024 22:49:22 +0100 Subject: (qemu) Cleanup qcow2 inspection mount notes. --- doc/note/qemu/qemu.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index f7c9498..1862858 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -73,11 +73,11 @@ qemu-system-x86_64 \ ## Inspect qcow2 by host mounting it $SUDO modprobe nbd - $SUDO qemu-nbd -c /dev/nbd0 /path/to/my.qcow2 - echo 'p' | $SUDO fdisk /dev/nbd0 - $SUDO mount -o ro /dev/nbd0p2 /mnt/q + $SUDO qemu-nbd -c /dev/nbd__ /path/to/my.qcow2 + echo 'p' | $SUDO fdisk /dev/nbd__ + $SUDO mount -o ro /dev/nbd__p__ /mnt/q $SUDO umount /mnt/q `# cleanup` - qemu-nbd -d /dev/nbd0 `# cleanup` + $SUDO qemu-nbd -d /dev/nbd__ `# cleanup` $SUDO rmmod nbd `# cleanup` -- cgit v1.1 From 751402766c157c4ca6b1102e42330a3d3f55a548 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 20 Feb 2024 00:25:50 +0100 Subject: (qemu) Continued on microVM. Kernel looks ok. --- doc/note/qemu/qemu-compile-itself.txt | 22 ++++++++++++++-------- doc/note/qemu/qemu.txt | 3 ++- doc/note/qemu/setup-dockerVM.txt | 23 ++++++++++++++++++----- 3 files changed, 34 insertions(+), 14 deletions(-) diff --git a/doc/note/qemu/qemu-compile-itself.txt b/doc/note/qemu/qemu-compile-itself.txt index c274c13..aed0522 100644 --- a/doc/note/qemu/qemu-compile-itself.txt +++ b/doc/note/qemu/qemu-compile-itself.txt @@ -2,19 +2,25 @@ # # Compile qemu bullshit-free for windoof. 
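+# (hint, untested: the wiki page linked below builds on windoof itself via
+#  MSYS2/mingw64; a plain "./configure && make" on linux, as done here, only
+#  yields linux binaries. Cross compiling from linux would need something
+#  like "./configure --cross-prefix=x86_64-w64-mingw32-".)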
# +# [src](https://wiki.qemu.org/Hosts/W32) +# +# +# WARN: This does NOT work +# true \ - && QEMU_URL="https://gitlab.com/qemu-project/qemu/-/archive/v8.2.1/qemu-v8.2.1.tar.gz" \ + && QEMU_URL="https://download.qemu.org/qemu-8.2.1.tar.xz" \ + && SUDO=sudo \ && WORKDIR=/home/${USER:?}/work \ && CACHEDIR=/var/tmp \ - && QEMU_TGZ="${QEMU_URL_TODOCUTIT:?}" \ - && apt install -y --no-install-recommends curl \ - && curl -D- "${QEMU_URL:?}" -o "${CACHEDIR:?}/${QEMU_TGZ:?}" \ + && QEMU_TXZ=$(basename "${QEMU_URL:?}") \ + && $SUDO apt install -y --no-install-recommends curl \ + && curl -D- "${QEMU_URL:?}" -o "${CACHEDIR:?}/${QEMU_TXZ:?}" \ + && mkdir -p "${WORKDIR:?}" \ && cd "${WORKDIR:?}" \ - && tar xf "${CACHEDIR:?}/${QEMU_TGZ:?}" \ - && mkdir build \ - && cd build \ - && ../configure \ + && tar xf "${CACHEDIR:?}/${QEMU_TXZ:?}" \ + && cd qemu* \ + && ./configure \ && make \ && true diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 4dd7f0c..0dac9bd 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -43,8 +43,8 @@ Windoof: ## Example Params (Usage: CopyPaste, then delege what is not needed) -# TODO use '-nodefaults'. qemu-system-x86_64 \ + -nodefaults `# <- TODO Fix network when using this` \ -accel kvm:whpx:hax:tcg -m size=2G -smp cores=$(nproc) \ -monitor stdio \ `# Drives & Boot.` \ @@ -64,6 +64,7 @@ qemu-system-x86_64 \ -usb -device usb-host,id=myUsbQemuId,vendorid=0xFFFF,productid=0xFFFF \ `# Choose ONE of those for graphic output` \ -nographic \ + -device VGA \ -display sdl,grab-mod=rctrl \ -display gtk,show-menubar=on \ -display vnc=127.0.0.1:0,to=99 `#HINT: 0 is port 5900` \ diff --git a/doc/note/qemu/setup-dockerVM.txt b/doc/note/qemu/setup-dockerVM.txt index b3e7e67..9bbcff7 100644 --- a/doc/note/qemu/setup-dockerVM.txt +++ b/doc/note/qemu/setup-dockerVM.txt @@ -11,11 +11,10 @@ true \ && CACHEDIR=/var/tmp \ && WORKDIR=/home/$USER/work \ && LINUX_TXZ=$(basename "${LINUX_URL:?}") \ - && true \ - && $SUDO apt install -y --no-install-recommends make gcc bc flex bison libc-dev libelf-dev libssl-dev \ - && true \ + && $SUDO apt install -y --no-install-recommends curl make gcc bc flex bison libc-dev libelf-dev libssl-dev \ && cd "${CACHEDIR:?}" \ && curl -L "${LINUX_URL:?}" -O \ + && mkdir -p "${WORKDIR:?}" \ && cd "${WORKDIR:?}" \ && tar xf "${CACHEDIR:?}/${LINUX_TXZ:?}" \ && cd linux* \ @@ -66,8 +65,22 @@ EOF true \ && make olddefconfig \ && make -j$(nproc) \ - && echo "[WARN ] TODO steps from here on are UNTESTED" && sleep 3 \ - && ls -Ahl arch/*/boot/bzImage \ + && base64 -d < "${CACHEDIR:?}/gagainit.c" && +H4sIACTh02UAA41UTYvbMBC9+1cIlwY7JKtsYSlsNoVSAj1s0x62pzQsijxORCzJq4+wacl/35Ed +u85HS3ywnp5n5j2NJNP+3Bq+SNbOlfaeUglmBUvNTHbDtaTLQq/oh+ELSD+Ughu9lcNM8w0YmvZp +9E4oXvgMyAMYo/TN+lOHsi4T51QhlsecVwLpk7idpVJ75c5p69h/2IivmSF9rpV1JIOc+cI9g9oK +o5UE5eYLMiF/IoJP/OPz09cJ9dbQQnNW0KVQ952pbectqKnqFWA8qArNfj4+DqL9OIq2WmSk8v3M +18A3SfW9NlMbs9obDoMjzjFsuRucxeaiAFybA+l2JeZ4ZcVKQUYKrVa1TF6wle1mVg76GXMsSg/r +tM547kjoEBEq1+gz0CInSeCSgzzphY8pmUzI8Jb0eqTa0TCdzr5PZ09NufCURqB2En8xwJxAM+/t +L+wGqUul4zYwiMhNJkyrMvp4d5eSBzLq1qtqop42nZqYBdxpsyM5w05kcadueOBVuOS2Q+6j+h11 +LX4LbfqXxcpeCEiafWlsnva+020Sunu2hMZ+pXfB8ZHbfYQu0R+RTKgkAJTlg8O+I97OF03x7nGK +lVaAi4hpaTQP4DCOEDZqlxMy2NLS2YAR1ui6LPniwVfTFl2XaNcyYCfL/Do1bHgYcbg+geaW8pXR +vgxMi5rc+gaAW2vrFJOQxIe/GMZY8Rt0/pdJm3h4BV5gaLjpuIpKqoXVbb/0azk4bQ7CFIucnAMD +zhtFbse4/W9r76rneAUAAA== +EOF +true \ + && gcc -Wall -static -o "${CACHEDIR:?}/gagainit" "${CACHEDIR:?}/gagainit.c" \ + && echo "[ERROR] TODO Need more steps here" && false \ && true -- 
cgit v1.1 From ed530a5d40150fe879daff72c28a037691e90ce9 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 23 Feb 2024 01:59:05 +0100 Subject: Give new C API a try. Looks promising so far. --- src/main/c/commonbase.h | 1 + src/main/c/paisa-fleet/FindFullDisks.c | 155 +++++++++++++++++++++++++++++++++ 2 files changed, 156 insertions(+) create mode 100644 src/main/c/commonbase.h create mode 100644 src/main/c/paisa-fleet/FindFullDisks.c diff --git a/src/main/c/commonbase.h b/src/main/c/commonbase.h new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/main/c/commonbase.h @@ -0,0 +1 @@ + diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c new file mode 100644 index 0000000..a845571 --- /dev/null +++ b/src/main/c/paisa-fleet/FindFullDisks.c @@ -0,0 +1,155 @@ +/* TODO move compile cmd somewhere better maybe? + +true \ + && CFLAGS="-Wall -Werror -pedantic -ggdb -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function" \ + && ${CC:?} -o build/bin/findfulldisks $CFLAGS src/main/c/paisa-fleet/FindFullDisks.c -Isrc/main/c -Iimport/include -Limport/lib -lgarbage -lpthread \ + && true + +*/ + +#include +#include +#include +#include + +#include "Garbage.h" + + +typedef struct FindFullDisks FindFullDisks; +typedef struct Device Device; + + +struct FindFullDisks { + char *sshUser; + int sshPort; + struct GarbageEnv **garb; + struct Garbage_Process **child; + int devices_len; + Device *devices; +}; + + +struct Device { + char hostname[sizeof"lunkwill-0123456789AB"]; + char eddieName[sizeof"eddie12345"]; + char lastSeen[sizeof"2023-12-31T23:59:59"]; +}; + + +static void Child_onStdout( const char*buf, int buf_len, void*cls ){ + //struct FindFullDisks*const app = cls; + if( buf_len > 0 ){ /*another chunk*/ + fprintf(stdout, "%.*s", buf_len, buf); + }else{ /*EOF*/ + assert(buf_len == 0); + } +} + + +static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){ + //struct FindFullDisks*const app = cls; + fprintf(stderr, "[TRACE] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum); +} + + +static void visitDevice( struct FindFullDisks*app, const Device*device ){ + assert(device != NULL); + fprintf(stderr, "[TRACE] %s \"%s\" (behind \"%s\")\n", __func__, + device->hostname, device->eddieName); + int err; + char eddieCmd[2048]; + err = snprintf(eddieCmd, sizeof eddieCmd, "true" + " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')" + " && STAGE=$PAISA_ENV" + " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\"" + " && if test \"${HOSTNAME}\" != \"%s\"; then true" + " && echo wrong host. Want %s found $HOSTNAME && false" + " ;fi" + " && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null" + " -p%d %s@%s" + " -- sh -c 'true" + " && HOSTNAME=$(hostname|sed '\"'\"'s_.isa.localdomain__'\"'\"')" + " && STAGE=$PAISA_ENV" + " && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\"" + // on some machine, df failed with "Stale file handle" But I want to + // continue with next device regardless of such errors. + " && df || true" + "'", + device->eddieName, device->eddieName, app->sshPort, app->sshUser, + strncmp("fook-",device->hostname,5) ? 
device->hostname : "fook" + ); + assert(err < sizeof eddieCmd); + assert(app->sshPort > 0 && app->sshPort < 0xFFFF); + char sshPortStr[12]; + sprintf(sshPortStr, "%d", app->sshPort); + char userAtEddie[64]; + err = snprintf(userAtEddie, sizeof userAtEddie, "%s@%s", app->sshUser, device->eddieName); + assert(err < sizeof userAtEddie); + app->child = (*app->garb)->newProcess(app->garb, &(struct Garbage_Process_Mentor){ + .cls = app, + .usePathSearch = !0, + .argv = (char*[]){ "ssh", + "-oRemoteCommand=none", + "-oStrictHostKeyChecking=no", + "-oUserKnownHostsFile=/dev/null", + "-p", sshPortStr, + userAtEddie, + "--", "sh", "-c", eddieCmd, + NULL, + }, + .onStdout = Child_onStdout, + .onJoined = Child_onJoined, + }); + assert(app->child != NULL); + (*app->child)->join(app->child, 42000); +} + + +static void startApp( void*cls ){ + struct FindFullDisks *app = cls; + for( int i = 0 ; i < app->devices_len ; ++i ){ + visitDevice(app, app->devices + i); + } +} + + +static void setupExampleDevices( FindFullDisks*app ){ + app->devices_len = 3; + app->devices = realloc(NULL, app->devices_len*sizeof*app->devices); + assert(app->devices != NULL || !"ENOMEM"); + /**/ + strcpy(app->devices[0].hostname, "fook-12345"); + strcpy(app->devices[0].eddieName, "eddie09845"); + strcpy(app->devices[0].lastSeen, "2023-12-31T23:59:59"); + /**/ + strcpy(app->devices[1].hostname, "fook-67890"); + strcpy(app->devices[1].eddieName, "eddie12345"); + strcpy(app->devices[1].lastSeen, "2023-12-31T23:42:42"); + /**/ + strcpy(app->devices[2].hostname, "lunkwill-12345"); + strcpy(app->devices[2].eddieName, "eddie09845"); + strcpy(app->devices[2].lastSeen, "2023-12-31T23:59:42"); + /**/ +} + + +int main( int argc, char**argv ){ + static union{ void*align; char space[SIZEOF_struct_GarbageEnv]; } garbMemory; + FindFullDisks app = { + .sshUser = "brĂĽnzli", + .sshPort = 22, + .garb = NULL, + .child = NULL, + }; + setupExampleDevices(&app); + app.garb = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){ + .memBlockToUse = &garbMemory, + .memBlockToUse_sz = sizeof garbMemory, + }); + assert(app.garb != NULL); + (*app.garb)->enqueBlocking(app.garb, startApp, &app); + (*app.garb)->runUntilDone(app.garb); + return 0; +} + + -- cgit v1.1 From 6c1f32be3051cc3ccbc4abefd197c8ffd5d8feee Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 23 Feb 2024 15:18:22 +0100 Subject: New Link. Qemu cleanup. 
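The dropped "-machine pc,accel=hax" block was a leftover: the example params
above it already select an accelerator via "-accel kvm:whpx:hax:tcg" (first
one that works wins), so the extra TODO lines just duplicated that. Whether
kvm actually got picked can be checked from the monitor (sketch, assuming
"-monitor stdio" as in the example params):

    (qemu) info kvm
    kvm support: enabled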
--- doc/note/links/links.txt | 1 + doc/note/qemu/qemu.txt | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 10bd745..2d918c2 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -258,6 +258,7 @@ Links (Aka argument amplifiers) - [Houston OOM 2023-06-27](https://wikit.post.ch/x/_Bv6Rw) - [Houston OOM 2023-01-20](https://wikit.post.ch/x/iRepPQ) - [Houston OOM Killed](https://jira.post.ch/browse/SDCISA-10871) +- [SDCISA-14967 Houston collects all req bodies into memory](https://jira.post.ch/browse/SDCISA-14967) - [http cache disable](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/1/overview?commentId=287832) - [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) - [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 0dac9bd..13dc325 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -72,9 +72,6 @@ qemu-system-x86_64 \ ## Broken systems likely need some of those too `# Fix broken hosts` \ - `# TODO test accel=hax` \ - `# TODO test accel=whpx` \ - -machine pc,accel=hax \ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \ `# Fix broken guests` \ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \ -- cgit v1.1 From 593aefbc8538ce20b3a5b48319da57656de8cc8e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 23 Feb 2024 15:21:51 +0100 Subject: Add qemu zwp (WIP). Add JavaCallgraph.lua (WIP). --- doc/note/qemu/setup-zwp-env.txt | 24 ++++++ src/main/lua/misc/JavaCallgraph.lua | 159 ++++++++++++++++++++++++++++++++++++ 2 files changed, 183 insertions(+) create mode 100644 doc/note/qemu/setup-zwp-env.txt create mode 100644 src/main/lua/misc/JavaCallgraph.lua diff --git a/doc/note/qemu/setup-zwp-env.txt b/doc/note/qemu/setup-zwp-env.txt new file mode 100644 index 0000000..a4ce521 --- /dev/null +++ b/doc/note/qemu/setup-zwp-env.txt @@ -0,0 +1,24 @@ + +(true \ + && DIO_URL=https://wikit.post.ch/download/attachments/613505757/d-054897-060542.zip?api=v2 \ + && SUDO=sudo \ + && WORKDIR=/home/$USER/zarniwoop-workspace \ + && CACHEDIR=/var/tmp \ + && DIO_VERSION=$(echo $DIO_URL|sed -E 's_^.*/d-([0-9-]+).zip.*$_\1_') \ + && $SUDO apt install -y --no-install-recommends openssh-server vim make curl git unzip \ + && cd "${CACHEDIR:?}" \ + && echo H4sIAAAAAAAAA1XLOw4CMQwA0Z5TUCMtsuNffBw7jiU6ak7PChqoZppX4NEUaLOw2Vfw6JRg8UXW6tdbHSA83c644by/Hs8Lp23PziU+AjcHdcKmjbjFJ8av0nPGRwGanmRkdSlsKTItcBQgjCL8U+b6VW9H4D67ogAAAA== | base64 -d | gunzip > MD5SUM \ + && curl -LO 'https://wikit.post.ch/download/attachments/613505757/d-054897-060542.zip?api=v2' \ + && grep "${DIO_VERSION:?}" MD5SUM | md5sum -c - \ + && mkdir -p "${WORKDIR:?}" \ + && cd "${WORKDIR:?}" \ + && unzip "${CACHEDIR:?}/d-${DIO_VERSION:?}.zip" \ + && mv DIO021E "d-${DIO_VERSION:?}" \ + && cd "d-${DIO_VERSION:?}/devel" \ + && rm -rf app \ + && git clone https://gitit.post.ch/scm/isa/zarniwoop.git app \ + && cd app \ + && printf '\n Zarniwoop setup complete (TODO install compiler etc)\n\n' \ + && true) + + diff --git a/src/main/lua/misc/JavaCallgraph.lua b/src/main/lua/misc/JavaCallgraph.lua new file mode 100644 index 0000000..6d0bd62 --- /dev/null +++ b/src/main/lua/misc/JavaCallgraph.lua @@ -0,0 +1,159 @@ + +local SL = 
require("scriptlee") +local newJavaClassParser = SL.newJavaClassParser +local objectSeal = SL.objectSeal +SL = nil + +local snk = io.stdout + +local main + + +function initParser( app ) + app.parser = newJavaClassParser{ + cls = app, + onMagic = function(m, app) assert(m == "\xCA\xFE\xBA\xBE") end, + onClassfileVersion = function(maj, min, app) assert(maj == 55 and min == 0) end, + onConstPoolClassRef = function(i, idx, app) + app.constPool[i] = objectSeal{ type = "CLASS_REF", classNameIdx = idx, className = false, } + end, + onConstPoolIfaceMethodRef = function(i, nameIdx, nameAndTypeIdx, app) + app.constPool[i] = objectSeal{ + type = "IFACE_METHOD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx, + className = false, methodName = false, methodType = false, + } + end, + onConstPoolMethodRef = function(i, classIdx, nameAndTypeIdx, app) + app.constPool[i] = objectSeal{ + type = "METHOD_REF", classIdx = classIdx, nameAndTypeIdx = nameAndTypeIdx, + className = false, methodName = false, signature = false, + } + end, + onConstPoolMethodType = function(i, descrIdx, app) + app.constPool[i] = objectSeal{ + type = "METHOD_TYPE", descrIdx = descrIdx, descrStr = false, + } + end, + onConstPoolNameAndType = function(i, nameIdx, typeIdx, app) + app.constPool[i] = objectSeal{ + type = "NAME_AND_TYPE", nameIdx = nameIdx, typeIdx = typeIdx, nameStr = false, typeStr = false, + } + end, + onConstPoolUtf8 = function(i, str, app) + app.constPool[i] = objectSeal{ type = "UTF8", str = str, } + end, + + onConstPoolInvokeDynamic = function(i, bootstrapMethodAttrIdx, nameAndTypeIdx, app) + app.constPool[i] = objectSeal{ + type = "INVOKE_DYNAMIC", bootstrapMethodAttrIdx = bootstrapMethodAttrIdx, nameAndTypeIdx = nameAndTypeIdx, + methodName = false, methodType = false, factoryClass = false, factoryMethod = false, factoryType = false, + } + end, + onConstPoolFieldRef = function(i, nameIdx, nameAndTypeIdx, that) + app.constPool[i] = objectSeal{ + type = "FIELD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx, + className = false, methodName = false, methodType = false, + } + end, + --onConstPoolMethodHandle = function(i, refKind, refIdx, app) + -- app.constPool[i] = objectSeal{ type = "METHOD_HANDLE", refKind = refKind, refIdx = refIdx, } + --end, + --onConstPoolStrRef = function(i, dstIdx, app) + -- print("ConstPool["..i.."] #"..dstIdx) + --end, + --onThisClass = function(nameIdx, app) + -- -- TODO print("onThisClass(#"..nameIdx..")") + --end, + --onField = function(iField, accessFlags, nameIdx, descrIdx, numAttrs, app) + -- print(string.format("onField(0x%04X, #%d, #%d, %d)",accessFlags,nameIdx,descrIdx,numAttrs)) + --end, + --onMethod = function(accessFlags, nameIdx, descrIdx, app) + -- print(string.format("onMethod(0x%04X, #%d, #%d)",accessFlags,nameIdx,descrIdx)) + --end, + + onConstPoolEnd = function( app ) + -- 1st run + for i, cpe in pairs(app.constPool) do + if false then + elseif cpe.type == "CLASS_REF" then + local tmp + tmp = assert(cpe.classNameIdx) + tmp = assert(app.constPool[cpe.classNameIdx], cpe.classNameIdx) + tmp = assert(tmp.str, tmp) + cpe.className = assert(tmp) + elseif cpe.type == "METHOD_TYPE" then + cpe.descrStr = assert(app.constPool[cpe.descrIdx].str) + elseif cpe.type == "NAME_AND_TYPE" then + cpe.nameStr = assert(app.constPool[cpe.nameIdx].str); + cpe.typeStr = assert(app.constPool[cpe.typeIdx].str); + end + end + -- 2nd run + for i, cpe in pairs(app.constPool) do + if false then + elseif cpe.type == "FIELD_REF" then + local nameAndType = 
assert(app.constPool[cpe.nameAndTypeIdx]) + cpe.className = assert(app.constPool[cpe.nameIdx].className); + cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str); + cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str); + elseif cpe.type == "METHOD_REF" then + local nameAndType = app.constPool[cpe.nameAndTypeIdx] + cpe.className = assert(app.constPool[cpe.classIdx].className) + cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str) + cpe.signature = assert(app.constPool[nameAndType.typeIdx].str) + elseif cpe.type == "IFACE_METHOD_REF" then + local classRef = assert(app.constPool[cpe.nameIdx]) + local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx]) + cpe.className = assert(classRef.className) + cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str) + cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str) + elseif cpe.type == "INVOKE_DYNAMIC" then + local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx]) + local bootstrapMethod = assert(app.constPool[cpe.bootstrapMethodAttrIdx], cpe.bootstrapMethodAttrIdx); + cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str) + cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str) + --cpe.factoryClass = ; + --cpe.factoryMethod = ; + --cpe.factoryType = ; + end + end + -- debug-print + snk:write("\n") + for _,cpIdx in pairs{ 13, 14, 15, 227, 230, 236, 704, 709, 717 }do + snk:write("CONST_POOL @ ".. cpIdx .."\n") + for k,v in pairs(app.constPool[cpIdx])do print("X",k,v)end + end + for i, cpe in pairs(app.constPool) do + if false then + --elseif cpe.type == "CLASSREF" then + -- snk:write("CLASS \"".. cpe.className .."\"\n") + end + end + end, + } +end + + +function main() + local app = objectSeal{ + parser = false, + constPool = {}, + } + + initParser(app) + + -- Read 1st arg as a classfile and pump it into the parser. + local src = arg[1] and io.open( arg[1], "rb" ) or nil + if not src then + print("ERROR: Failed to open file from 1st arg: "..(arg[1]or"nil")) return + end + while true do + local buf = src:read(8192) + if not buf then break end + app.parser:write(buf) + end + app.parser:closeSnk() +end + + +main() -- cgit v1.1 From 3ca0cbd584a5e191c6d4e6b6872521001f8f7c2e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 23 Feb 2024 21:34:52 +0100 Subject: Try C impl of FindFullDisks --- src/main/c/paisa-fleet/FindFullDisks.c | 49 ++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 20 deletions(-) diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c index a845571..50f7b90 100644 --- a/src/main/c/paisa-fleet/FindFullDisks.c +++ b/src/main/c/paisa-fleet/FindFullDisks.c @@ -38,6 +38,7 @@ struct Device { static void Child_onStdout( const char*buf, int buf_len, void*cls ){ //struct FindFullDisks*const app = cls; + //fprintf(stderr, "[TRACE] %s(buf, %d, cls)\n", __func__, buf_len); if( buf_len > 0 ){ /*another chunk*/ fprintf(stdout, "%.*s", buf_len, buf); }else{ /*EOF*/ @@ -62,7 +63,7 @@ static void visitDevice( struct FindFullDisks*app, const Device*device ){ " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')" " && STAGE=$PAISA_ENV" " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\"" - " && if test \"${HOSTNAME}\" != \"%s\"; then true" + " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true" " && echo wrong host. 
Want %s found $HOSTNAME && false" " ;fi" " && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null" @@ -85,19 +86,25 @@ static void visitDevice( struct FindFullDisks*app, const Device*device ){ char userAtEddie[64]; err = snprintf(userAtEddie, sizeof userAtEddie, "%s@%s", app->sshUser, device->eddieName); assert(err < sizeof userAtEddie); + char *childArgv[] = { "ssh", + "-oRemoteCommand=none", + "-oStrictHostKeyChecking=no", + "-oUserKnownHostsFile=/dev/null", + "-oConnectTimeout=4", + "-p", sshPortStr, + userAtEddie, + "--", "sh", "-c", eddieCmd, + NULL + }; + //fprintf(stderr, "CMDLINE:"); + //for( int i = 0 ; childArgv[i] != NULL ; ++i ) fprintf(stderr, " \"%s\"", childArgv[i]); + //fprintf(stderr, "\n\n"); app->child = (*app->garb)->newProcess(app->garb, &(struct Garbage_Process_Mentor){ .cls = app, .usePathSearch = !0, - .argv = (char*[]){ "ssh", - "-oRemoteCommand=none", - "-oStrictHostKeyChecking=no", - "-oUserKnownHostsFile=/dev/null", - "-p", sshPortStr, - userAtEddie, - "--", "sh", "-c", eddieCmd, - NULL, - }, + .argv = childArgv, .onStdout = Child_onStdout, + //.onStderr = Child_onStderr, .onJoined = Child_onJoined, }); assert(app->child != NULL); @@ -114,22 +121,22 @@ static void startApp( void*cls ){ static void setupExampleDevices( FindFullDisks*app ){ - app->devices_len = 3; + app->devices_len = 1; app->devices = realloc(NULL, app->devices_len*sizeof*app->devices); assert(app->devices != NULL || !"ENOMEM"); /**/ strcpy(app->devices[0].hostname, "fook-12345"); - strcpy(app->devices[0].eddieName, "eddie09845"); + strcpy(app->devices[0].eddieName, "eddie09815"); strcpy(app->devices[0].lastSeen, "2023-12-31T23:59:59"); /**/ - strcpy(app->devices[1].hostname, "fook-67890"); - strcpy(app->devices[1].eddieName, "eddie12345"); - strcpy(app->devices[1].lastSeen, "2023-12-31T23:42:42"); - /**/ - strcpy(app->devices[2].hostname, "lunkwill-12345"); - strcpy(app->devices[2].eddieName, "eddie09845"); - strcpy(app->devices[2].lastSeen, "2023-12-31T23:59:42"); - /**/ +// strcpy(app->devices[1].hostname, "fook-67890"); +// strcpy(app->devices[1].eddieName, "eddie12345"); +// strcpy(app->devices[1].lastSeen, "2023-12-31T23:42:42"); +// /**/ +// strcpy(app->devices[2].hostname, "lunkwill-12345"); +// strcpy(app->devices[2].eddieName, "eddie09845"); +// strcpy(app->devices[2].lastSeen, "2023-12-31T23:59:42"); +// /**/ } @@ -140,6 +147,8 @@ int main( int argc, char**argv ){ .sshPort = 22, .garb = NULL, .child = NULL, + .devices_len = 0, + .devices = NULL, }; setupExampleDevices(&app); app.garb = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){ -- cgit v1.1 From 92395b77b59c423216015a88042df7a4061514a7 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 23 Feb 2024 23:30:01 +0100 Subject: Move some hdr files around. 
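commonKludge.h becomes common/commonbase.h, so the shared C helpers keep
living in one place, and the stray near-empty src/main/c/commonbase.h from
the FindFullDisks commit goes away. Usage stays as before (illustrative
sketch; the actual include path depends on the build's -I flags):

    #include "common/commonbase.h"

    int main( int argc, char**argv ){
        FUCK_BROKEN_SYSTEMS(); /* win32: put stdin/stdout into binary mode */
        return 0;
    }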
--- src/main/c/common/commonKludge.h | 16 ---------------- src/main/c/common/commonbase.h | 16 ++++++++++++++++ src/main/c/commonbase.h | 1 - 3 files changed, 16 insertions(+), 17 deletions(-) delete mode 100644 src/main/c/common/commonKludge.h create mode 100644 src/main/c/common/commonbase.h delete mode 100644 src/main/c/commonbase.h diff --git a/src/main/c/common/commonKludge.h b/src/main/c/common/commonKludge.h deleted file mode 100644 index e0f0cba..0000000 --- a/src/main/c/common/commonKludge.h +++ /dev/null @@ -1,16 +0,0 @@ - -typedef unsigned char uchar; - -#define STRQUOT_ASDFASDF(s) #s -#define STRQUOT(s) STRQUOT_ASDFASDF(s) -#ifndef PROJECT_VERSION -# define PROJECT_VERSION 0.0.0-SNAPSHOT -#endif - -#if __WIN32 - int _setmode(int,int); -# define FUCK_BROKEN_SYSTEMS() do{char a=0;for(;!(a&10);){_setmode(a++,32768);}}while(0) -#else -# define FUCK_BROKEN_SYSTEMS() -#endif - diff --git a/src/main/c/common/commonbase.h b/src/main/c/common/commonbase.h new file mode 100644 index 0000000..e0f0cba --- /dev/null +++ b/src/main/c/common/commonbase.h @@ -0,0 +1,16 @@ + +typedef unsigned char uchar; + +#define STRQUOT_ASDFASDF(s) #s +#define STRQUOT(s) STRQUOT_ASDFASDF(s) +#ifndef PROJECT_VERSION +# define PROJECT_VERSION 0.0.0-SNAPSHOT +#endif + +#if __WIN32 + int _setmode(int,int); +# define FUCK_BROKEN_SYSTEMS() do{char a=0;for(;!(a&10);){_setmode(a++,32768);}}while(0) +#else +# define FUCK_BROKEN_SYSTEMS() +#endif + diff --git a/src/main/c/commonbase.h b/src/main/c/commonbase.h deleted file mode 100644 index 8b13789..0000000 --- a/src/main/c/commonbase.h +++ /dev/null @@ -1 +0,0 @@ - -- cgit v1.1 From eebaf983bb03763e2091cc3524e235aea8d75710 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 8 Mar 2024 10:03:11 +0100 Subject: How the fuck can it be so freaking hard to just setup logging properly!! --- src/main/patch/houston/default.patch | 126 ++++------------------------ src/main/patch/houston/fixidiots.patch | 148 +++++++++++++++++++++++++++++++++ 2 files changed, 163 insertions(+), 111 deletions(-) create mode 100644 src/main/patch/houston/fixidiots.patch diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch index 4169156..be226b3 100644 --- a/src/main/patch/houston/default.patch +++ b/src/main/patch/houston/default.patch @@ -5,8 +5,6 @@ "125344e940ebc090183bad7fc096938289f15e3f" from "2024-01-16". -diff --git a/pom.xml b/pom.xml -index 0ed4f7f3..b44c5693 100644 --- a/pom.xml +++ b/pom.xml @@ -73,7 +73,7 @@ @@ -18,6 +16,10 @@ index 0ed4f7f3..b44c5693 100644 9.4.43.v20210629 + + +--- a/pom.xml ++++ b/pom.xml @@ -301,4 +301,25 @@ @@ -44,13 +46,13 @@ index 0ed4f7f3..b44c5693 100644 + +
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml -index 374dcb97..3c24937c 100644 + + --- a/houston-process/pom.xml +++ b/houston-process/pom.xml @@ -25,6 +25,26 @@ - + + + org.slf4j @@ -75,8 +77,8 @@ index 374dcb97..3c24937c 100644 ch.post.it.paisa.houston -diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -index 432efb01..d1729fe9 100644 + + --- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java +++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java @@ -68,6 +68,9 @@ public class Deployer { @@ -89,6 +91,10 @@ index 432efb01..d1729fe9 100644 setStartupProperties(); Props.prepare(); + + +--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java ++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java @@ -378,7 +378,7 @@ public class Deployer { // All other queues (typically to backend services) with a slow-down pattern after // failed delivery @@ -98,107 +104,5 @@ index 432efb01..d1729fe9 100644 LOGGER.info( "Redisques redis-client will created with MaxPoolSize: {}, MaxPoolWaiting: {}, MaxWaitingHandlers: {}", Props.getMaxRedisConnectionPoolSize4RedisQues(), -diff --git a/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java -new file mode 100644 -index 00000000..aa3aa2e0 ---- /dev/null -+++ b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java -@@ -0,0 +1,98 @@ -+package org.apache.logging.slf4j; -+ -+import org.apache.logging.log4j.spi.ExtendedLogger; -+import org.slf4j.Marker; -+import org.slf4j.event.Level; -+import org.slf4j.spi.LocationAwareLogger; -+import org.slf4j.spi.LoggingEventBuilder; -+ -+import java.io.Serializable; -+ -+ -+/**

FU** this fu***** damn sh** code that still tries to use log4j, no matter -+ * how strongly we tell it NOT to use it!

-+ *

This class only exists to prevent services from starting if IDEA still missed -+ * the dependency changes in the pom and still tries to use the wrong logger impl.

*/ -+public class Log4jLogger implements LocationAwareLogger, Serializable { -+ -+ private final org.slf4j.Logger log; -+ -+ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) { -+ this.log = new org.slf4j.simple.SimpleLoggerFactory().getLogger(name); -+ } -+ -+ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) { -+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet"); -+ } -+ -+ @Override public String getName() { return log.getName(); } -+ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); } -+ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); } -+ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); } -+ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); } -+ @Override public void trace(String s) { log.trace(s); } -+ @Override public void trace(String s, Object o) { log.trace(s, o); } -+ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); } -+ @Override public void trace(String s, Object... objects) { log.trace(s, objects); } -+ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); } -+ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); } -+ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); } -+ @Override public void trace(Marker marker, String s) { log.trace(marker, s); } -+ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); } -+ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); } -+ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); } -+ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); } -+ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); } -+ @Override public void debug(String s) { log.debug(s); } -+ @Override public void debug(String s, Object o) { log.debug(s, o); } -+ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); } -+ @Override public void debug(String s, Object... objects) { log.debug(s, objects); } -+ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); } -+ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); } -+ @Override public void debug(Marker marker, String s) { log.debug(marker, s); } -+ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); } -+ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); } -+ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); } -+ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); } -+ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); } -+ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); } -+ @Override public void info(String s) { log.info(s); } -+ @Override public void info(String s, Object o) { log.info(s, o); } -+ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); } -+ @Override public void info(String s, Object... 
objects) { log.info(s, objects); } -+ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); } -+ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); } -+ @Override public void info(Marker marker, String s) { log.info(marker, s); } -+ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); } -+ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); } -+ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); } -+ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); } -+ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); } -+ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); } -+ @Override public void warn(String s) { log.warn(s); } -+ @Override public void warn(String s, Object o) { log.warn(s, o); } -+ @Override public void warn(String s, Object... objects) { log.warn(s, objects); } -+ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); } -+ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); } -+ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); } -+ @Override public void warn(Marker marker, String s) { log.warn(marker, s); } -+ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); } -+ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); } -+ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); } -+ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); } -+ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); } -+ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); } -+ @Override public void error(String s) { log.error(s); } -+ @Override public void error(String s, Object o) { log.error(s, o); } -+ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); } -+ @Override public void error(String s, Object... objects) { log.error(s, objects); } -+ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); } -+ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); } -+ @Override public void error(Marker marker, String s) { log.error(marker, s); } -+ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); } -+ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); } -+ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); } -+ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } -+ @Override public LoggingEventBuilder atError() { return log.atError(); } -+ -+} + + diff --git a/src/main/patch/houston/fixidiots.patch b/src/main/patch/houston/fixidiots.patch new file mode 100644 index 0000000..4050425 --- /dev/null +++ b/src/main/patch/houston/fixidiots.patch @@ -0,0 +1,148 @@ + + Why is it so fucking hard to just keep out all those random annoying logger + implementations?!? + + Who the heck wants to configure all of them, and if ONE is missed just have + all important error reports concealed to the nirvana? 
Who the fuck wants such + shit? + + Please: STOP THIS SHIT! Libraries solely have to depend on slf4j. As its name + already says, it is a FACADE! NOT AN IMPLEMENTATION! STOP MESSING THIS UP ALL + THE TIME WITH YET ANOTHER NEW SHITTY NERDY LOGGER IMPL! + + +--- /dev/null ++++ b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java +@@ -0,0 +1,109 @@ ++package org.apache.logging.slf4j; ++ ++import org.apache.logging.log4j.spi.ExtendedLogger; ++import org.slf4j.Logger; ++import org.slf4j.Marker; ++import org.slf4j.event.Level; ++import org.slf4j.spi.LocationAwareLogger; ++import org.slf4j.spi.LoggingEventBuilder; ++ ++import java.io.Serializable; ++import java.lang.reflect.Constructor; ++import java.lang.reflect.InvocationTargetException; ++import java.lang.reflect.Method; ++ ++ ++/**

FU** this fu***** damn sh** code that still tries to use log4j, no matter ++ * how strongly we tell it NOT to use it!

++ *

This class only exists to prevent services from starting if IDEA still missed ++ * the dependency changes in the pom and still tries to use the wrong logger impl.

*/ ++public class Log4jLogger implements LocationAwareLogger, Serializable { ++ ++ private final org.slf4j.Logger log; ++ ++ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) { ++ try { ++ Class logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory"); ++ Constructor ctor = logrFactClz.getConstructor(); ++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class); ++ this.log = (Logger) getLoggerFn.invoke(ctor.newInstance(), name); ++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) { ++ throw new UnsupportedOperationException(ex); ++ } ++ } ++ ++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) { ++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet"); ++ } ++ ++ @Override public String getName() { return log.getName(); } ++ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); } ++ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); } ++ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); } ++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); } ++ @Override public void trace(String s) { log.trace(s); } ++ @Override public void trace(String s, Object o) { log.trace(s, o); } ++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); } ++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); } ++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); } ++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); } ++ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); } ++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); } ++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); } ++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); } ++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); } ++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); } ++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); } ++ @Override public void debug(String s) { log.debug(s); } ++ @Override public void debug(String s, Object o) { log.debug(s, o); } ++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); } ++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); } ++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); } ++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); } ++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); } ++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); } ++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); } ++ @Override public void debug(Marker marker, String s, Object... 
objects) { log.debug(marker, s, objects); } ++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); } ++ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); } ++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); } ++ @Override public void info(String s) { log.info(s); } ++ @Override public void info(String s, Object o) { log.info(s, o); } ++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); } ++ @Override public void info(String s, Object... objects) { log.info(s, objects); } ++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); } ++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); } ++ @Override public void info(Marker marker, String s) { log.info(marker, s); } ++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); } ++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); } ++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); } ++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); } ++ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); } ++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); } ++ @Override public void warn(String s) { log.warn(s); } ++ @Override public void warn(String s, Object o) { log.warn(s, o); } ++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); } ++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); } ++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); } ++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); } ++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); } ++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); } ++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); } ++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); } ++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); } ++ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); } ++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); } ++ @Override public void error(String s) { log.error(s); } ++ @Override public void error(String s, Object o) { log.error(s, o); } ++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); } ++ @Override public void error(String s, Object... objects) { log.error(s, objects); } ++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); } ++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); } ++ @Override public void error(Marker marker, String s) { log.error(marker, s); } ++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); } ++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); } ++ @Override public void error(Marker marker, String s, Object... 
objects) { log.error(marker, s, objects); } ++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } ++ @Override public LoggingEventBuilder atError() { return log.atError(); } ++ ++} + + + +--- /dev/null ++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jLoggerAdapter.java +@@ -0,0 +1,16 @@ ++package org.slf4j.reload4j; ++ ++/**

FU** this fu***** damn sh** code that still tries to use log4j, no matter ++ * how strongly we tell it NOT to use it!

++ *

This class only exists to prevent services from starting if IDEA still missed ++ * the dependency changes in the pom and still tries to use the wrong logger impl.

*/ ++public class Reload4jLoggerAdapter { ++ ++ public Reload4jLoggerAdapter(org.apache.log4j.Logger l) { ++ throw new UnsupportedOperationException("Fuck those fucking script-kiddies!" ++ + " How fucking hard can it be to just properly setup logging?!?" ++ + " Please !STOP! intermixing interfaces with implementations all the time!" ++ + " This fucking shit just conceals erros all the time! STOP IT!"); ++ } ++ ++} + -- cgit v1.1 From 54fa9f28e2dfe05ef8b960c45233a44e3415e0ee Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 8 Mar 2024 19:26:40 +0100 Subject: Dump some changes --- doc/note/openshift/openshift.txt | 2 +- .../unspecifiedgarbage/time/TimeUtils.java | 6 +-- src/main/patch/houston/default-20230203.patch | 52 -------------------- src/main/patch/houston/default-20230214.patch | 56 ---------------------- src/main/patch/houston/default-20230331.patch | 56 ---------------------- 5 files changed, 4 insertions(+), 168 deletions(-) delete mode 100644 src/main/patch/houston/default-20230203.patch delete mode 100644 src/main/patch/houston/default-20230214.patch delete mode 100644 src/main/patch/houston/default-20230331.patch diff --git a/doc/note/openshift/openshift.txt b/doc/note/openshift/openshift.txt index 48fda8f..88e33ee 100644 --- a/doc/note/openshift/openshift.txt +++ b/doc/note/openshift/openshift.txt @@ -77,7 +77,7 @@ HINT: ALL files from Current dir (.) will get uploaded (when global rsync not av ## up/down scale from cli. input von thom (20230815) - oc scale dc/preflux --replicas=1 + oc scale dc/${SVCNAME:?} --replicas=1 diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java index 2bb1bfb..d7d7ec8 100644 --- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java @@ -25,9 +25,9 @@ public class TimeUtils { * Find smallest distance assuming integers overflow "like a circle". * * Computers cannot represent all existing integers. Due to how - * integers are represented in computers, they are not infinite but - * more like a circle. Speak when we infinitely increment an - * integer, it overflows and (usually) continues to walk around this + * integers are represented in java, they are not infinite but + * more like a circle. Speak when we infinitely increment an integer, + * it overflows and (usually) continues to walk around this * (imaginary) circle. * * This function takes two of those numbers on this circle and diff --git a/src/main/patch/houston/default-20230203.patch b/src/main/patch/houston/default-20230203.patch deleted file mode 100644 index c1deeca..0000000 --- a/src/main/patch/houston/default-20230203.patch +++ /dev/null @@ -1,52 +0,0 @@ - - My custom patch ready-to-apply to have an "usable" houston. - - Contains: - - Simplelogger - - Listen localhost only - - Queue-Retry every 5 seconds. 
- -diff --git a/houston-process/pom.xml b/houston-process/pom.xml -index fff9c178..960c0098 100644 ---- a/houston-process/pom.xml -+++ b/houston-process/pom.xml -@@ -20,6 +20,26 @@ - true - - -+ -+ org.slf4j -+ slf4j-api -+ 1.7.25 -+ -+ -+ org.slf4j -+ slf4j-simple -+ 1.7.25 -+ -+ -+ org.slf4j -+ jcl-over-slf4j -+ 1.7.25 -+ -+ -+ org.slf4j -+ jul-to-slf4j -+ 1.7.25 -+ - - - ch.post.it.paisa.houston -diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -index ee7d8b02..b28ae8d6 100644 ---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -@@ -256,7 +256,7 @@ public class Deployer { - qc.add(new QueueConfiguration().withPattern("brox-from-vehicles-.*").withRetryIntervals(10, 20, 30, 60, 120) - .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(10_000)); - // All other queues (typically to backend services) with a slow-down pattern after failed delivery -- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(10, 20, 30, 60, 120)); -+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5));/*TODO revert*/ - - RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with() - .address(Address.redisquesAddress()) diff --git a/src/main/patch/houston/default-20230214.patch b/src/main/patch/houston/default-20230214.patch deleted file mode 100644 index 3f8fa16..0000000 --- a/src/main/patch/houston/default-20230214.patch +++ /dev/null @@ -1,56 +0,0 @@ - - My custom patch ready-to-apply to have an "usable" houston. - - Contains: - - Simplelogger - - Listen localhost only - - Queue-Retry every 5 seconds. - - Patch based on "houston-02.01.12.00" aka - "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12. 
- -diff --git a/houston-process/pom.xml b/houston-process/pom.xml -index fff9c178..960c0098 100644 ---- a/houston-process/pom.xml -+++ b/houston-process/pom.xml -@@ -20,6 +20,26 @@ - - - -+ -+ org.slf4j -+ slf4j-api -+ 1.7.25 -+ -+ -+ org.slf4j -+ slf4j-simple -+ 1.7.25 -+ -+ -+ org.slf4j -+ jcl-over-slf4j -+ 1.7.25 -+ -+ -+ org.slf4j -+ jul-to-slf4j -+ 1.7.25 -+ - - - ch.post.it.paisa.houston -diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -index ee7d8b02..b28ae8d6 100644 ---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -@@ -309,7 +309,7 @@ public class Deployer { - qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays) - .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100)); - // All other queues (typically to backend services) with a slow-down pattern after failed delivery -- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays)); -+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/)); - - RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with() - .address(Address.redisquesAddress()) - diff --git a/src/main/patch/houston/default-20230331.patch b/src/main/patch/houston/default-20230331.patch deleted file mode 100644 index 64d3628..0000000 --- a/src/main/patch/houston/default-20230331.patch +++ /dev/null @@ -1,56 +0,0 @@ - - My custom patch ready-to-apply to have an "usable" houston. - - Contains: - - Simplelogger - - Listen localhost only - - Queue-Retry every 5 seconds. - - Patch based on "houston-02.01.12.00" aka - "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12. - -diff --git a/houston-process/pom.xml b/houston-process/pom.xml -index fff9c178..960c0098 100644 ---- a/houston-process/pom.xml -+++ b/houston-process/pom.xml -@@ -20,6 +20,26 @@ - - - -+ -+ org.slf4j -+ slf4j-api -+ 2.0.1 -+ -+ -+ org.slf4j -+ slf4j-simple -+ 2.0.1 -+ -+ -+ org.slf4j -+ jcl-over-slf4j -+ 2.0.1 -+ -+ -+ org.slf4j -+ jul-to-slf4j -+ 2.0.1 -+ - - - ch.post.it.paisa.houston -diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -index ee7d8b02..b28ae8d6 100644 ---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -@@ -309,7 +309,7 @@ public class Deployer { - qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays) - .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100)); - // All other queues (typically to backend services) with a slow-down pattern after failed delivery -- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays)); -+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/)); - - RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with() - .address(Address.redisquesAddress()) - -- cgit v1.1 From e8d61dd86577f75e5b8442c8d0de850f762bcb76 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 18 Mar 2024 18:53:34 +0100 Subject: Use more meaninful ports for docker setup. 
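
HINT: a minimal sketch, assuming dockerd runs as a systemd service. The changed
proxy values in the drop-in only apply after a unit reload plus a daemon restart:

    true `# reload & restart so the new proxy env applies (host)` \
      && systemctl daemon-reload \
      && systemctl restart docker \
      && systemctl show docker --property=Environment \
      && true
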
--- doc/note/docker/Docker-Daemon-Install.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/note/docker/Docker-Daemon-Install.txt b/doc/note/docker/Docker-Daemon-Install.txt index c6a120a..1bfa6bb 100644 --- a/doc/note/docker/Docker-Daemon-Install.txt +++ b/doc/note/docker/Docker-Daemon-Install.txt @@ -27,10 +27,10 @@ section (HINT: "/etc/environment" does not work) [service] ... - Environment="HTTP_PROXY=http://10.0.2.2:31280" - Environment="http_proxy=http://10.0.2.2:31280" - Environment="HTTPS_PROXY=http://10.0.2.2:31280" - Environment="https_proxy=http://10.0.2.2:31280" + Environment="HTTP_PROXY=http://10.0.2.2:3128" + Environment="http_proxy=http://10.0.2.2:3128" + Environment="HTTPS_PROXY=http://10.0.2.2:3128" + Environment="https_proxy=http://10.0.2.2:3128" Environment="NO_PROXY=127.0.0.1,10.0.2.2,*.post.ch" Environment="no_proxy=127.0.0.1,10.0.2.2,*.post.ch" -- cgit v1.1 From a975ad404d78ff118f2c7fbd702f483f27707fa1 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 20 Mar 2024 18:00:30 +0100 Subject: Add some FailFast mocks related to vertx. --- .../gateleenKludge/FailFastMetricsOptions.java | 35 ++++++++++++++++++++++ .../FailFastVertxMetricsFactory.java | 27 +++++++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java create mode 100644 src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java new file mode 100644 index 0000000..c911061 --- /dev/null +++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java @@ -0,0 +1,35 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge; + +import io.vertx.core.json.JsonObject; +import io.vertx.core.metrics.MetricsOptions; +import io.vertx.core.spi.VertxMetricsFactory; + + +public class FailFastMetricsOptions extends io.vertx.core.metrics.MetricsOptions { + + private final String dbgMsg; + + public FailFastMetricsOptions( String dbgMsg ){ this.dbgMsg = dbgMsg; } + + public FailFastMetricsOptions(){ this(failCtor()); } + + private FailFastMetricsOptions( MetricsOptions o ){ this(failCtor()); } + + private FailFastMetricsOptions( JsonObject json ){ this(failCtor()); } + + private static String failCtor(){ throw new IllegalStateException("Do NOT use this ctor!"); } + + @Override public boolean isEnabled(){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public MetricsOptions setEnabled(boolean en){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public VertxMetricsFactory getFactory(){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public MetricsOptions setFactory( VertxMetricsFactory f ){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public JsonObject toJson(){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public String toString(){ throw new UnsupportedOperationException(dbgMsg); } + +} + diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java new file mode 100644 index 0000000..fa0d7e1 --- /dev/null +++ 
b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java @@ -0,0 +1,27 @@ +package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge; + +import io.vertx.core.VertxOptions; +import io.vertx.core.impl.VertxBuilder; +import io.vertx.core.json.JsonObject; +import io.vertx.core.metrics.MetricsOptions; +import io.vertx.core.spi.metrics.VertxMetrics; + + +public class FailFastVertxMetricsFactory implements io.vertx.core.spi.VertxMetricsFactory { + + private final String dbgMsg; + + public FailFastVertxMetricsFactory(String dbgMsg ){ this.dbgMsg = dbgMsg; } + + @Override public void init(VertxBuilder b) { throw new UnsupportedOperationException(dbgMsg); } + + @Override public VertxMetrics metrics(VertxOptions o){ throw new UnsupportedOperationException(dbgMsg); } + + @Override public MetricsOptions newOptions() { throw new UnsupportedOperationException(dbgMsg); } + + @Override public MetricsOptions newOptions(MetricsOptions o) { throw new UnsupportedOperationException(dbgMsg); } + + @Override public MetricsOptions newOptions(JsonObject j) { throw new UnsupportedOperationException(dbgMsg); } + +} + -- cgit v1.1 From 64a3d0c6eb38afb9c106838a07c2d018d17c46e3 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 20 Mar 2024 18:01:13 +0100 Subject: (links) Add some links. --- doc/note/links/links.txt | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 2d918c2..b8570a9 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -135,6 +135,8 @@ Links (Aka argument amplifiers) - [case slf4j](http://www.slf4j.org/api/org/slf4j/Logger.html) - [General rules](https://gualtierotesta.github.io/java/tutorial-correct-slf4j-logging-usage-and-how-to-check-it/) - [logging guards](https://stackoverflow.com/a/12953090/4415884) +- [impl VS facade in lib](https://jira.post.ch/browse/SDCISA-15223) +- [drop logger impl from lib](https://github.com/swisspost/vertx-redisques/pull/153) ## Misleading log msg messages - "https://gitit.post.ch/projects/ISA/repos/zarquon/pull-requests/2/overview?commentId=61283" @@ -578,3 +580,12 @@ Links (Aka argument amplifiers) ## Tools like tcpdump are incredibly important - [tcpdump discovers the truth once more](https://jira.post.ch/browse/SDCISA-13746?focusedId=1939377&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1939377) +## MUST have an issue +- [no-issue PR gets sued](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/721/overview?commentId=349529) + +## STOP using stupid local times use UTC +- [3 simple rules](https://dev.to/corykeane/3-simple-rules-for-effectively-handling-dates-and-timezones-1pe0) + +## WARN clustering is NOT the solution +- [trin kill performance with shared counter](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/155/overview?commentId=352721) + -- cgit v1.1 From 46843e3d815edc19521051632a20a9bf6771c906 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 20 Mar 2024 19:27:42 +0100 Subject: DUMP misc changes. 
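
HINT for the DigHoustonLogs.lua hunks below: every filter entry is a plain Lua
table with "action", "file" and "level" plus one matcher field (msgEquals,
msgPattern, stackStartsWith or stackPattern). Minimal sketch of that shape; the
host "example42" is made up, "drop" is the only action this file uses, and the
pattern fields presumably are Lua patterns applied to the log message:

    local exampleFilter = {
        action = "drop", file = "Forwarder", level = "ERROR",
        -- Lua pattern, eg "%x+" matches a hex run, "%d+" matches digits.
        msgPattern = "^%x+ http://example42:7012/ping Connection was closed$",
    }
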
--- doc/note/openshift/dbg-mem-issues.txt | 31 +++ src/main/lua/paisa-logs/DigHoustonLogs.lua | 371 +++++++++++++++-------------- 2 files changed, 222 insertions(+), 180 deletions(-) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 0435081..16e574d 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -136,3 +136,34 @@ true \ && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \ ;done) + + + + + + +Zwischenfall auf INT 20231124 + + + +`# Create heap dump` +com.sun.management.dumpHeap("/usr/local/vertx/houston-storage-file/houston-___-heap-2023____-____Z.hprof", true) + +`# Inspect` +${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && ls -Ahl' + +true `# Get made heap dump` \ + && if test ! -d heapDump; then echo "Dir heapDump missing"; false ;fi \ + && echo create checksum. \ + && ${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && md5sum -b houston-*.hprof >> MD5SUM-$(date -u +%Y%m%d-%H%M%SZ)' \ + && echo checksum done. Begin dload. \ + && ${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && tar c houston-*.hprof MD5SUM*' | (cd heapDump && tar x) \ + && echo dload done \ + && true + + + + + + + diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index e217f82..45d4763 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -55,7 +55,7 @@ function loadFilters( that ) -- .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.lambda.cleanupFile.0.FilePutter.java:192. ~.rest.storage.[0-9.]+.jar:..\n" -- .."\tat io.vertx.core.impl.future.FutureImpl.3.onSuccess.FutureImpl.java:141. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" -- }, --- + -- -- Seen: 2023-10-18 prod -- -- TODO open PR to add some logging so we have a chance to find submarine. -- { action = "drop", file = "ContextImpl", level = "ERROR", @@ -102,30 +102,30 @@ function loadFilters( that ) msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles" .." The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/" .."trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012", }, --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" --- .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded" --- .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", }, --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" --- .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, --- -- Seen: 2023-10-18 prod --- -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. --- { action = "drop", file = "FilePutter", level = "ERROR", --- msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." 
http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded" + .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, + -- Seen: 2023-10-18 prod + -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. + { action = "drop", file = "FilePutter", level = "ERROR", + msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, -- Seen: 2024-01-10 prod, 2023-10-18 prod -- There are a whole bunch of related errors behind this filter which AFAICT all relate to shaky eddie connections. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+" .." The timeout period of 30000ms has been exceeded while executing [DEGLOPSTU]+ /from.houston/%d+/eagle/[^ ]+ for server eddie%d+:7012$", }, --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "Forwarder", level = "ERROR", --- msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, --- --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, --- --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, -- Seen: 2024-01-05 prod, 2023-10-18 prod -- Reported: TODO link existing issue here @@ -168,64 +168,64 @@ function loadFilters( that ) { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\n" .."io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address: __vertx.reply.[0-9]+, repliedAddress: nsync.register.sync" }, --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", --- msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, --- --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "Forwarder", level = "ERROR", --- msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, --- --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "Forwarder", level = "ERROR", --- msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" --- .." The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" --- .." 
for server bob:8080", }, --- --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", --- stackStartsWith = "" --- .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" --- .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" --- .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" --- .."\tat io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", --- }, --- --- -- Seen: 2023-10-18 prod --- -- TODO Push issue to my backlog to fix this. --- { action = "drop", file = "ContextImpl", level = "ERROR", --- msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", --- stackPattern = "^" --- .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" --- .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" --- .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. ~.gateleen.logging.[0-9.]+.jar:..\n" --- .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" --- .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. ~.gateleen.routing.[0-9.]+.jar:..\n" --- .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", --- }, --- --- { action = "drop", file = "Forwarder", level = "ERROR", --- msgPattern = "^..... ................................ 
http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", --- }, --- --- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. --- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" --- .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", }, --- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. --- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" --- .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", }, + -- Seen: 2023-10-18 prod + { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", + msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" + .." The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" + .." for server bob:8080", }, + + -- Seen: 2023-10-18 prod + { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", + stackStartsWith = "" + .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" + .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" + .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" + .."\tat 
io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", + }, + + -- Seen: 2023-10-18 prod + -- TODO Push issue to my backlog to fix this. + { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", + stackPattern = "^" + .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. ~.gateleen.logging.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" + .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. ~.gateleen.routing.[0-9.]+.jar:..\n" + .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", + }, + + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^..... ................................ http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", + }, + + -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", }, + -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" + .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", }, -- Seen 2024-01-10 prod -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" @@ -234,112 +234,123 @@ function loadFilters( that ) -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" .." http://eddie%d+:7012/from.houston/%d+/eagle/timetable/notification/v1/planningareas/%d+/notifications/%x+ Connection was closed$", }, --- -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. --- { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, - - ---- TODO Thought timeout? Can happen. But how often is ok? 
- ---- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) - ---- Seen 2022-06-20, 2022-08-30 prod - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, - - ---- [SDCISA-9572] pag - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, - - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, - - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, - - ---- TODO Analyze - ---- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) - ---- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? - ---- Seen: 2021-09-17 - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" - -- .." state. Closing server connection for stability reason", }, - - ---- TODO Analyze - ---- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) - --{ action = "drop", file = "Forwarder", level = "WARN", - -- msgPattern = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, - - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, - - ---- TODO Analyze - ---- Seen: 2021-09-17, ..., 2022-06-20 - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, - - ---- Seen 2021-10-25, 2022-08-30 prod - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" - -- .." state. Closing server connection for stability reason", }, - - ---- TODO Analyze. - ---- Seen 2021-09-17, 2022-06-20 - --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPattern = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, - ---- Seen 2022-06-20 prod - --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPattern = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, - ---- TODO rm filter when fixed - ---- Reported: SDCISA-9573 - ---- Seen: 2022-08-30 prod, 2022-06-20, 2021-09-17 - --{ action = "drop", file = "BisectClient", level = "WARN", - -- msgPattern = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, - - ---- Reported: SDCISA-9574 - ---- TODO rm when resolved - ---- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod, - --{ action = "drop", file = "Utils", level = "ERROR", - -- msgPattern = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, - - ---- TODO Analyze - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, - - ---- TODO Analyze. 
- ---- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 - --{ action = "drop", file = "RedisQues", level = "WARN", - -- msgPattern = "Registration for queue .+ has changed to null", }, - --- -- Reported: SDCISA-10973 --- -- Seen: 2023-10-18 prod. --- { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", --- msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" --- .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, + -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. + { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, + + -- TODO Thought timeout? Can happen. But how often is ok? + -- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) + -- Seen 2022-06-20, 2022-08-30 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, + + -- [SDCISA-9572] pag + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, + + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, + + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, + + -- TODO Analyze + -- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) + -- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? + -- Seen: 2021-09-17 + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" + .." state. Closing server connection for stability reason", }, + + -- TODO Analyze + -- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) + { action = "drop", file = "Forwarder", level = "WARN", + msgPattern = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, + + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, + + -- TODO Analyze + -- Seen: 2021-09-17, ..., 2022-06-20 + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, + + -- Seen 2021-10-25, 2022-08-30 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" + .." state. Closing server connection for stability reason", }, + + -- TODO Analyze. 
+ -- Seen 2021-09-17, 2022-06-20 + { action = "drop", file = "BisectClient", level = "WARN", + msgPattern = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + -- Seen 2022-06-20 prod + { action = "drop", file = "BisectClient", level = "WARN", + msgPattern = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, + -- TODO rm filter when fixed + -- Reported: SDCISA-9573 + -- Seen: 2022-08-30 prod, 2022-06-20, 2021-09-17 + { action = "drop", file = "BisectClient", level = "WARN", + msgPattern = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, + + -- Reported: SDCISA-9574 + -- TODO rm when resolved + -- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod, + { action = "drop", file = "Utils", level = "ERROR", + msgPattern = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, + + -- TODO Analyze + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, + + -- TODO Analyze. + -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 + { action = "drop", file = "RedisQues", level = "WARN", + msgPattern = "Registration for queue .+ has changed to null", }, + + -- Reported: SDCISA-10973 + -- Seen: 2023-10-18 prod. + { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", + msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" + .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, -- Seen 2024-01-10 prod { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", msgPattern = "The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012" }, - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" - -- .."%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been" - -- .." exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" - -- .."%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012", }, - - ---- Reported: SDCISA-9578 - ---- TODO rm when fixed - ---- Seen 2022-08-30 prod, 2022-06-20 prod - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - -- .." Connection reset by peer", }, - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - -- .." Connection was closed", }, - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - -- .." Response already written. Not sure about the state. 
Closing server connection for stability reason", }, - - ---- TODO analyze - ---- Seen 2022-06-20 prod - --{ action = "drop", file = "Forwarder", level = "ERROR", - -- msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" + .."%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been" + .." exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" + .."%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012", }, + + -- Reported: SDCISA-9578 + -- TODO rm when fixed + -- Seen 2022-08-30 prod, 2022-06-20 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + .." Connection reset by peer", }, + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + .." Connection was closed", }, + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" + .." Response already written. Not sure about the state. Closing server connection for stability reason", }, + + -- TODO analyze + -- Seen 2024-03-20 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = "^..... [a-z0-9]+ http://vhfspa1.pnet.ch:7096/timetable/information/v1/tripinfo%?id%=I%-" }, + + -- TODO analyze + -- Seen 2022-06-20 prod + { action = "drop", file = "Forwarder", level = "ERROR", + msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, + + -- TODO analyze + -- Seen 2024-03-20 prod + { action = "drop", file = "ContextImpl", level = "ERROR", + msgPattern = "^Unhandled exception\njava.lang.IllegalStateException: Response head already sent", }, + } end -- cgit v1.1 From 9bbe320028ec4fa15824c34fa1ab44690663b492 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 27 Mar 2024 20:21:48 +0100 Subject: Fix link. Some houston patching - Fix yagni link - add draft of loggers in houston/fixidiots.patch - Add some potential patches for houston future due to in progress changes. 
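
The logger shims in this patch work by reusing the exact fully-qualified
class names of the offending providers (eg
org.slf4j.reload4j.Reload4jServiceProvider), so the service registrations
already present on the classpath resolve to our stubs, which just delegate
everything to slf4j-simple. For reference, SLF4J 2.x discovers its backend
via ServiceLoader; a tiny (hypothetical, name made up) probe class to see
which providers are visible on a classpath:

    public class ListSlf4jProviders {
        public static void main( String[] args ){
            /* same lookup slf4j itself performs at init time */
            var loader = java.util.ServiceLoader.load(org.slf4j.spi.SLF4JServiceProvider.class);
            for( var provider : loader ){
                System.out.println(provider.getClass().getName());
            }
        }
    }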
--- doc/note/links/links.txt | 2 +- src/main/patch/houston/fixidiots.patch | 217 +++++++++++++++++++++++++++++++++ src/main/patch/houston/future.patch | 47 +++++++ 3 files changed, 265 insertions(+), 1 deletion(-) create mode 100644 src/main/patch/houston/future.patch diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index b8570a9..08a498e 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -372,7 +372,7 @@ Links (Aka argument amplifiers) - "https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html" ## YAGNI (but also KISS and DRY) -- [YAGNI, KISS and DRY](https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6#fc82) +- [YAGNI, KISS and DRY](https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6) - [eagle queue json only](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/331/overview?commentId=236944) - [How to repair KISS](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff) - [won't stream, bcause YAGNI](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/5/overview?commentId=316503) diff --git a/src/main/patch/houston/fixidiots.patch b/src/main/patch/houston/fixidiots.patch index 4050425..7af8f9d 100644 --- a/src/main/patch/houston/fixidiots.patch +++ b/src/main/patch/houston/fixidiots.patch @@ -146,3 +146,220 @@ + +} + +--- /dev/null ++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jServiceProvider.java +@@ -0,0 +1,64 @@ ++package org.slf4j.reload4j; ++ ++import org.slf4j.ILoggerFactory; ++import org.slf4j.IMarkerFactory; ++import org.slf4j.Logger; ++import org.slf4j.Marker; ++import org.slf4j.spi.MDCAdapter; ++import org.slf4j.spi.SLF4JServiceProvider; ++ ++import java.lang.reflect.InvocationTargetException; ++import java.lang.reflect.Method; ++import java.util.Deque; ++import java.util.Map; ++ ++ ++/**

How many of those fu**ing damn stupid idiots are still out there ++ * continuing to stubbornly include those stupid logger impls with their ++ * libraries?!?

*/ ++public class Reload4jServiceProvider implements SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter { ++ ++ private final Object slf4jSimpleLoggerFactory; ++ private final Method getLoggerFn; ++ ++ public Reload4jServiceProvider() { ++ try { ++ Class logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory"); ++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance(); ++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class); ++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) { ++ throw new UnsupportedOperationException(ex); ++ } ++ } ++ ++ @Override public ILoggerFactory getLoggerFactory() { return this; } ++ @Override public IMarkerFactory getMarkerFactory() { return this; } ++ @Override public MDCAdapter getMDCAdapter() { return this; } ++ @Override public String getRequestedApiVersion() { return "2.0"; } ++ @Override public void initialize() {} ++ ++ @Override ++ public Logger getLogger(String name) { ++ try { ++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name); ++ } catch (IllegalAccessException | InvocationTargetException ex) { ++ throw new RuntimeException(ex); ++ } ++ } ++ ++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; } ++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; } ++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; } ++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void remove(String s) { assert false : "TODO not impl yet"; } ++ @Override public void clear() { assert false : "TODO not impl yet"; } ++ @Override public Map getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; } ++ @Override public void setContextMap(Map map) { assert false : "TODO not impl yet"; } ++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; } ++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public Deque getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; } ++ ++} + + +--- /dev/null ++++ b/houston-process/src/main/java/org/apache/logging/slf4j/SLF4JServiceProvider.java +@@ -0,0 +1,62 @@ ++package org.apache.logging.slf4j; ++ ++import org.slf4j.ILoggerFactory; ++import org.slf4j.IMarkerFactory; ++import org.slf4j.Logger; ++import org.slf4j.Marker; ++import org.slf4j.spi.MDCAdapter; ++ ++import java.lang.reflect.InvocationTargetException; ++import java.lang.reflect.Method; ++import java.util.Deque; ++import java.util.Map; ++ ++ ++/**

How many of those fu**ing damn stupid idiotic libs are still out there ++ * continuing to stubbornly include those stupid logger impls?!?

*/ ++public class SLF4JServiceProvider implements org.slf4j.spi.SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter { ++ ++ private final Object slf4jSimpleLoggerFactory; ++ private final Method getLoggerFn; ++ ++ public SLF4JServiceProvider() { ++ try { ++ Class logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory"); ++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance(); ++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class); ++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) { ++ throw new UnsupportedOperationException(ex); ++ } ++ } ++ ++ @Override public ILoggerFactory getLoggerFactory() { return this; } ++ @Override public IMarkerFactory getMarkerFactory() { return this; } ++ @Override public MDCAdapter getMDCAdapter() { return this; } ++ @Override public String getRequestedApiVersion() { return "2.0"; } ++ @Override public void initialize() {} ++ ++ @Override ++ public Logger getLogger(String name) { ++ try { ++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name); ++ } catch (IllegalAccessException | InvocationTargetException ex) { ++ throw new RuntimeException(ex); ++ } ++ } ++ ++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; } ++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; } ++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; } ++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void remove(String s) { assert false : "TODO not impl yet"; } ++ @Override public void clear() { assert false : "TODO not impl yet"; } ++ @Override public Map getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; } ++ @Override public void setContextMap(Map map) { assert false : "TODO not impl yet"; } ++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; } ++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public Deque getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; } ++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; } ++ ++} + + +--- /dev/null ++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java +@@ -0,0 +1,38 @@ ++package net.bull.javamelody.internal.common; ++ ++import org.slf4j.Logger; ++ ++import javax.servlet.http.HttpServletRequest; ++import java.lang.reflect.InvocationTargetException; ++import java.lang.reflect.Method; ++ ++ ++/**

How many of those fu**ing damn stupid idiot libs are still out there ++ * continuing to stubbornly include those stupid logger impls?!?

*/ ++public class Log4J2Logger implements net.bull.javamelody.JavaMelodyLogger { ++ ++ private final org.slf4j.Logger log; ++ ++ public Log4J2Logger(){ ++ try { ++ Class logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory"); ++ Object slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance(); ++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class); ++ this.log = (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, "net.bull.javamelody"); ++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) { ++ throw new UnsupportedOperationException(ex); ++ } ++ } ++ ++ @Override public void debug(String s) { log.debug(s); } ++ @Override public void debug(String s, Throwable ex) { log.debug(s, ex); } ++ @Override public void info(String s) { log.info(s); } ++ @Override public void info(String s, Throwable ex) { log.info(s, ex);} ++ @Override public void warn(String s, Throwable ex) { log.warn(s, ex);} ++ @Override public void logHttpRequest( ++ HttpServletRequest httpRequest, String requestName, long duration, boolean systemError, int responseStatus, long responseSize, String loggerName ++ ){ ++ if (log.isInfoEnabled()) log.info("{}", LOG.buildLogMessage(httpRequest, duration, systemError, responseStatus, responseSize)); ++ } ++ ++} + + +--- /dev/null ++++ b/houston-process/src/main/java/org/eclipse/jetty/util/log/Slf4jLog.java +@@ -0,0 +1,32 @@ ++package org.eclipse.jetty.util.log; ++ ++import org.slf4j.LoggerFactory; ++ ++ ++/** Yet another fu**ing bastard lib having its own shiny stupid loggers. */ ++public class Slf4jLog { ++ ++ private final org.slf4j.Logger log; ++ ++ public Slf4jLog() { ++ this.log = LoggerFactory.getLogger("org.eclipse.jetty.util.log"); ++ } ++ ++ public Slf4jLog(String name) { ++ this.log = LoggerFactory.getLogger(name); ++ } ++ ++ public String getName() { return log.getName(); } ++ public void warn(String msg, Object... args) { log.warn(msg, args); } ++ public void warn(Throwable thrown) { log.warn("", thrown); } ++ public void warn(String msg, Throwable thrown) { log.warn(msg, thrown); } ++ public void info(String msg, Object... args) { log.info(msg, args); } ++ public void info(Throwable thrown) { log.info("", thrown); } ++ public void info(String msg, Throwable thrown) { log.info(msg, thrown); } ++ public void debug(String msg, Object... args) { log.debug(msg, args); } ++ public void debug(String msg, long arg) { if (log.isDebugEnabled()) log.debug(msg, arg); } ++ public void debug(Throwable thrown) { this.debug("", thrown); } ++ public void debug(String msg, Throwable thrown) { log.debug(msg, thrown); } ++ public boolean isDebugEnabled() { return log.isDebugEnabled(); } ++ public void setDebugEnabled(boolean enabled) { log.warn("setDebugEnabled not implemented"); } ++ ++} + diff --git a/src/main/patch/houston/future.patch b/src/main/patch/houston/future.patch new file mode 100644 index 0000000..2ac5922 --- /dev/null +++ b/src/main/patch/houston/future.patch @@ -0,0 +1,47 @@ + + Some patches that maybe will become relevant in future. For example bcause a + PR is no yet merged or similar. 
+ + +--- /dev/null ++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java +@@ -22,1 +22,1 @@ + if (!iLoaders.hasNext()) throw new IllegalStateException("Too few logger impls"); + SLF4JServiceProvider loggerProvider = iLoaders.next(); +- if (iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls"); ++ if (!(loggerProvider instanceof org.slf4j.simple.SimpleServiceProvider) && iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls"); + loggerProvider.initialize(); + ILoggerFactory loggerFactory = loggerProvider.getLoggerFactory(); + + +--- /dev/null ++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/BadLoggerImplKiller.java +@@ -0,0 +1,26 @@ ++package ch.post.it.paisa.houston.process.main; ++ ++import org.slf4j.spi.SLF4JServiceProvider; ++ ++import java.util.ServiceLoader; ++ ++ ++public class BadLoggerImplKiller { ++ ++ public static void assertExactlyOneLoggerImplPresent(){ ++ Class log4jProviderClz, simpleProviderClz; ++ try { ++ log4jProviderClz = Class.forName("org.apache.logging.slf4j.SLF4JServiceProvider"); ++ simpleProviderClz = Class.forName("org.slf4j.simple.SimpleServiceProvider"); ++ }catch( ClassNotFoundException ex ){ ++ throw new RuntimeException(ex); ++ } ++ for( SLF4JServiceProvider provider : ServiceLoader.load(SLF4JServiceProvider.class) ){ ++ Class providerClass = provider.getClass(); ++ if( log4jProviderClz.isAssignableFrom(providerClass) ) continue; ++ if( simpleProviderClz.isAssignableFrom(providerClass) ) continue; ++ throw new IllegalStateException("Go away with this ugly logger: "+ providerClass.getName()); ++ } ++ } ++ ++} + + -- cgit v1.1 From cec0b20ac371fdb9e1293d425a3f8f193391427c Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 28 Mar 2024 16:04:43 +0100 Subject: Migrate redis doc --- doc/note/redis/redis.txt | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 doc/note/redis/redis.txt diff --git a/doc/note/redis/redis.txt b/doc/note/redis/redis.txt new file mode 100644 index 0000000..51bf541 --- /dev/null +++ b/doc/note/redis/redis.txt @@ -0,0 +1,31 @@ + +Redis +================ + + +## Run redis commands from cli + +redis-cli -a ***** SCAN 0 + + +## Example commands + +SCAN 0 COUNT 42 +TYPE key +GET redisques:queues:vehicleoperation-events-for-vehicle-9942 +LRANGE key start stop +LRANGE key 1 1 (get head) +LRANGE key -1 -1 (get tail) + + +## List all keys (WARN: do NOT in production!) + +KEYS *:part:inside-the:key:* +KEYS redisques:queues:vehicleoperation-events-for-vehicle-* + + +## Delete by pattern + +redis-cli --scan --pattern schedulers:* | xargs redis-cli del + + -- cgit v1.1 From a15e0994246d33671081b59c42b91158a2fc998a Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 28 Mar 2024 18:10:46 +0100 Subject: Begun script to create some queues (does not work yet) --- doc/note/redis/redis.txt | 1 + src/main/nodejs/misc/ProduceLotsOfQueues.js | 81 +++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+) create mode 100644 src/main/nodejs/misc/ProduceLotsOfQueues.js diff --git a/doc/note/redis/redis.txt b/doc/note/redis/redis.txt index 51bf541..d64948a 100644 --- a/doc/note/redis/redis.txt +++ b/doc/note/redis/redis.txt @@ -20,6 +20,7 @@ LRANGE key -1 -1 (get tail) ## List all keys (WARN: do NOT in production!) 
+SCAN 0 MATCH *part:of:key:* COUNT 42 KEYS *:part:inside-the:key:* KEYS redisques:queues:vehicleoperation-events-for-vehicle-* diff --git a/src/main/nodejs/misc/ProduceLotsOfQueues.js b/src/main/nodejs/misc/ProduceLotsOfQueues.js new file mode 100644 index 0000000..9f0101e --- /dev/null +++ b/src/main/nodejs/misc/ProduceLotsOfQueues.js @@ -0,0 +1,81 @@ +;(function(){ + + const http = require("http"); + const log = process.stderr; + const out = process.stdout; + + setTimeout(main); return; + + + function main(){ + const app = Object.seal({ + isHelp: false, + host: "localhost", + port: 7013, + uri: "/houston/tmp/gugus/bar", + queueName: "my-gaga-queue", + }); + if( parseArgs(app, process.argv) !== 0 ) process.exit(1); + if( app.isHelp ){ printHelp(); return; } + run(app); + } + + + + function printHelp(){ + out.write("\n" + +" Produce a bunch of gateleen queues\n" + +" \n" + +" Options:\n" + +" \n" + +" \n") + } + + + function parseArgs( app, argv ){ + var isYolo = false; + for( var iA = 2 ; iA < argv.length ; ++iA ){ + var arg = argv[iA]; + if( arg == "--help" ){ + app.isHelp = true; return 0; + }else if( arg == "--yolo" ){ + isYolo = true; + }else{ + log.write("EINVAL: "+ arg +"\n"); + return -1; + } + } + if( !isYolo ){ log.write("EINVAL: wanna yolo?\n"); return; } + return 0; + } + + + function run( app ){ + foo(app); + } + + + function foo( app ){ + const req = Object.seal({ + base: null, + }); + req.base = http.request({ + host: app.host, port: app.port, + method: "PUT", path: app.uri, + headers: { + "X-Queue": app.queueName, + "X-Queue-Expire-After": 9999999, + }, + }); + req.base.on("response", onResponse.bind(0, app)); + req.base.end("{\"guguseli\":42}\n"); + } + + + function onResponse( app, rsp ){ + log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n"); + for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n"); + } + + +}()); -- cgit v1.1 From 074b68ef92389c4a916f1e74515a391c4aa5d5f6 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 3 Apr 2024 16:05:00 +0200 Subject: Add nodejs kludge script to produce some gateleen queueus. 
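
Usage sketch (host, port, uri and queue name are hardcoded defaults in the
script; the --yolo flag is required, --help prints the usage):

    node src/main/nodejs/misc/ProduceLotsOfQueues.js --yolo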
--- src/main/nodejs/misc/ProduceLotsOfQueues.js | 60 +++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 12 deletions(-) diff --git a/src/main/nodejs/misc/ProduceLotsOfQueues.js b/src/main/nodejs/misc/ProduceLotsOfQueues.js index 9f0101e..890f867 100644 --- a/src/main/nodejs/misc/ProduceLotsOfQueues.js +++ b/src/main/nodejs/misc/ProduceLotsOfQueues.js @@ -3,6 +3,7 @@ const http = require("http"); const log = process.stderr; const out = process.stdout; + const NOOP = function(){}; setTimeout(main); return; @@ -51,30 +52,65 @@ function run( app ){ - foo(app); + //placeHook(app); + putSomeNonsense(app); } - function foo( app ){ + function placeHook( app ){ const req = Object.seal({ base: null, + app: app, }); req.base = http.request({ host: app.host, port: app.port, - method: "PUT", path: app.uri, - headers: { - "X-Queue": app.queueName, - "X-Queue-Expire-After": 9999999, - }, + method: "PUT", path: app.uri +"/_hooks/listeners/http", + //headers: { + // "X-Expire-After": "42", + //}, }); - req.base.on("response", onResponse.bind(0, app)); - req.base.end("{\"guguseli\":42}\n"); + req.base.on("response", onResponse.bind(0, req)); + req.base.end(JSON.stringify({ + destination: "http://127.0.0.1:7099/guguseli", + queueExpireAfter/*seconds*/: 42, + })); + function onResponse( req, rsp ){ + var app = req.app; + log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n"); + for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n"); + } } - function onResponse( app, rsp ){ - log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n"); - for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n"); + function putSomeNonsense( app ){ + const nonsense = Object.seal({ + app: app, + req: null, + i: 0, + limit: 42, + }); + putNextRequest(nonsense); + function putNextRequest( nonsense ){ + nonsense.req = http.request({ + host: app.host, port: app.port, + method: "PUT", path: app.uri +"/foo/"+ nonsense.i, + headers: { + "X-Queue": app.queueName +"-"+ nonsense.i, + "X-Queue-Expire-After": 9999999, + }, + }); + nonsense.req.on("response", onResponse.bind(0, nonsense)); + nonsense.req.end("{\"guguseli\":\""+ new Date().toISOString() +"\"}\n"); + } + function onResponse( nonsense, rsp ){ + var app = nonsense.app; + log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n"); + for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n"); + rsp.on("data", NOOP); + if( nonsense.i++ < nonsense.limit ){ + putNextRequest(nonsense); + } + } } -- cgit v1.1 From 193315d56302bac67b4a1688e892ac499d4052b4 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 5 Apr 2024 18:32:46 +0200 Subject: Continue queue flooder which is handy to reproduce bugs related to queuing. 
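
Each PUT the script fires is roughly equivalent to the curl call below
(values are the script defaults; the ISO date in the body is just an
example payload):

    curl -X PUT 'http://localhost:7013/houston/tmp/gugus/bar/foo/0' \
        -H 'X-Queue: my-gaga-queue-0' \
        -H 'X-Queue-Expire-After: 9999999' \
        -d '{"guguseli":"2024-01-01T00:00:00.000Z"}'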
---
 doc/note/links/links.txt                    | 1 +
 src/main/nodejs/misc/ProduceLotsOfQueues.js | 6 ++++--
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt
index 08a498e..f90be46 100644
--- a/doc/note/links/links.txt
+++ b/doc/note/links/links.txt
@@ -137,6 +137,7 @@ Links (Aka argument amplifiers)
 - [logging guards](https://stackoverflow.com/a/12953090/4415884)
 - [impl VS facade in lib](https://jira.post.ch/browse/SDCISA-15223)
 - [drop logger impl from lib](https://github.com/swisspost/vertx-redisques/pull/153)
+- [Should my library attempt to configure logging?](https://www.slf4j.org/faq.html#configure_logging)
 
 ## Misleading log msg messages
 - "https://gitit.post.ch/projects/ISA/repos/zarquon/pull-requests/2/overview?commentId=61283"

diff --git a/src/main/nodejs/misc/ProduceLotsOfQueues.js b/src/main/nodejs/misc/ProduceLotsOfQueues.js
index 890f867..810ac63 100644
--- a/src/main/nodejs/misc/ProduceLotsOfQueues.js
+++ b/src/main/nodejs/misc/ProduceLotsOfQueues.js
@@ -104,8 +104,10 @@
         }
         function onResponse( nonsense, rsp ){
             var app = nonsense.app;
-            log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
-            for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+            if( rsp.statusCode != 200 ){
+                log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
+                for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+            }
             rsp.on("data", NOOP);
             if( nonsense.i++ < nonsense.limit ){
                 putNextRequest(nonsense);
-- 
cgit v1.1


From b55041beb9c8d2fc60d8657f6d38f665b3418004 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Mon, 8 Apr 2024 21:29:11 +0200
Subject: Add assert_is_tYPE as a template.

---
 src/main/c/common/assert_is.h | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 src/main/c/common/assert_is.h

diff --git a/src/main/c/common/assert_is.h b/src/main/c/common/assert_is.h
new file mode 100644
index 0000000..b6e3132
--- /dev/null
+++ b/src/main/c/common/assert_is.h
@@ -0,0 +1,39 @@
+
+#if !NDEBUG
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){if(p==NULL){fprintf(stderr,"assert(" STR_QUOT(T)\
+" != NULL) %s:%d\n",f,l);abort();}T*obj=p;if(!(PRED)){fprintf(stderr,\
+"assert(type is \""STR_QUOT(T)"\") %s:%d\n",f,l);abort();}return p; }
+#else
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){return p;}
+#endif
+
+
+
+/* Example usage: */
+
+/* add some magic to your struct under check */
+typedef struct Person Person;
+struct Person {
+    char tYPE[sizeof"Hi, I'm a Person"];
+};
+
+/* instantiate a checker */
+TPL_assert_is(Person, !strcmp(obj->tYPE, "Hi, I'm a Person"));
+#define assert_is_Person(p) assert_is_Person(p, __FILE__, __LINE__)
+
+/* make sure magic is initialized (ALSO MAKE SURE TO PROPERLY INVALIDATE
+ * IT IN DTOR!)*/
+static void someCaller( void ){
+    Person p = {0};
+    strcpy(p.tYPE, "Hi, I'm a Person");
+    void *ptr = &p; /*whops compiler cannot help us any longer*/
+    someCallee(ptr);
+}
+
+/* verify you really got a Person */
+static void someCallee( void*shouldBeAPerson ){
+    Person *p = assert_is_Person(shouldBeAPerson);
+}
+
-- 
cgit v1.1


From 0cf6708c16028a5ac8974270275bb6531f18ae2d Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Wed, 10 Apr 2024 19:54:10 +0200
Subject: Reorg links.
Cleanup DigHoustonLogs.lua --- doc/note/links/links.txt | 9 +- src/main/lua/paisa-logs/DigHoustonLogs.lua | 305 +++++++---------------------- 2 files changed, 69 insertions(+), 245 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index f90be46..78e045a 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -227,9 +227,6 @@ Links (Aka argument amplifiers) - [think please ...](https://m.youtube.com/watch?v=hSfylUXhpkA) - [java streams are ugly](https://gitit.post.ch/projects/ISA/repos/nsync/pull-requests/55/overview?commentId=328210) -## The Only way to Format Dates ISO 8601 -- "https://xkcd.com/1179/" - ## Backward compatibility, Breaking Changes - "https://www.redstar.be/backward-compatibility-in-software-development-what-and-why/" - [UserInformation SAP 2023](https://gitit.post.ch/projects/ISA/repos/user-information-api/pull-requests/20/overview?commentId=270157) @@ -584,9 +581,7 @@ Links (Aka argument amplifiers) ## MUST have an issue - [no-issue PR gets sued](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/721/overview?commentId=349529) -## STOP using stupid local times use UTC +## Format Date ISO 8601, UTC GMT localtime +- [public service announcement](https://xkcd.com/1179/) - [3 simple rules](https://dev.to/corykeane/3-simple-rules-for-effectively-handling-dates-and-timezones-1pe0) -## WARN clustering is NOT the solution -- [trin kill performance with shared counter](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/155/overview?commentId=352721) - diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua index 45d4763..92ef035 100644 --- a/src/main/lua/paisa-logs/DigHoustonLogs.lua +++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua @@ -34,42 +34,29 @@ end function loadFilters( that ) assert(not that.filters) that.filters = { + -- General: Append new rules AT END if not closely related to another one. --- { action = "drop", beforeDate = "2023-10-18 03:00:00.000", }, +-- { action = "drop", beforeDate = "2024-10-18 03:00:00.000", }, -- { action = "drop", afterDate = "2024-01-31 23:59:59.999", }, { action = "drop", level = "TRACE" }, { action = "drop", level = "DEBUG" }, { action = "drop", level = "INFO" }, - { action = "drop", level = "WARN" }, - --- -- Seen: 2023-10-18 prod --- { action = "drop", file = "ContextImpl", level = "ERROR", --- msgEquals = "Unhandled exception\njava.lang.NullPointerException: No null handler accepted", --- stackPattern = "^" --- .."\tat java.util.Objects.requireNonNull.Objects.java:246. ~..:..\n" --- .."\tat io.vertx.core.impl.future.FutureImpl.onComplete.FutureImpl.java:132. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete.PromiseImpl.java:23. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- .."\tat io.vertx.core.file.impl.FileSystemImpl.delete.FileSystemImpl.java:290. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.deleteFile.FilePutter.java:218. ~.rest.storage.[0-9.]+.jar:..\n" --- .."\tat org.swisspush.reststorage.FilePutter.FileCleanupManager.lambda.cleanupFile.0.FilePutter.java:192. ~.rest.storage.[0-9.]+.jar:..\n" --- .."\tat io.vertx.core.impl.future.FutureImpl.3.onSuccess.FutureImpl.java:141. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- }, - --- -- Seen: 2023-10-18 prod --- -- TODO open PR to add some logging so we have a chance to find submarine. 
--- { action = "drop", file = "ContextImpl", level = "ERROR", --- msgEquals = "Unhandled exception\njava.lang.IllegalStateException: Response head already sent", --- stackPattern = "^" --- .."\tat io.vertx.core.http.impl.Http1xServerResponse.checkHeadWritten.Http1xServerResponse.java:684. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- .."\tat io.vertx.core.http.impl.Http1xServerResponse.setStatusCode.Http1xServerResponse.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder.lambda.getAsyncHttpClientResponseHandler.7.Forwarder.java:430. ~.gateleen.routing.[0-9.]+.jar:..\n" --- .."\tat io.vertx.core.impl.future.FutureImpl.3.onFailure.FutureImpl.java:153. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", --- }, + --{ action = "drop", level = "WARN" }, + + -- FUCK those damn nonsense spam logs!!! + { action = "drop", file = "Forwarder" }, + { action = "drop", level = "ERROR", file = "HttpClientRequestImpl" }, + { action = "drop", level = "ERROR", file = "BisectClient" }, + + -- Seen: 2024-04-10 prod. + -- Reported 20240410 via "https://github.com/swisspost/vertx-redisques/pull/166" + { action = "drop", file = "RedisQues", level = "WARN", + msgPattern = "^Registration for queue .- has changed to .-$", }, -- Reported: SDCISA-13717 -- Seen: 2024-01-05 prod, 2023-10-18 prod - { action = "drop", file = "LocalHttpServerResponse", level = "ERROR", + { action = "drop", file = "LocalHttpServerResponse", level = "ERROR", msgPattern = "^non%-proper HttpServerResponse occured\r?\n" .."java.lang.IllegalStateException:" .." You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using" @@ -82,14 +69,6 @@ function loadFilters( that ) .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending" .." any data if you are not using HTTP chunked encoding.", }, --- -- Seen: 2023-10-18 --- -- Opened nsync PR 49 as a first counter measure. --- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.NullPointerException: null", --- stackStartsWith = "\tat org.swisspush.nsync.multiget.MultiGetServer.lambda$tryLaunchOneRequest$2(MultiGetServer.java:107) ~[nsync-0.6.0.jar:?]" }, - - - -- Bunch of nonsense !ERROR!s which happen all the time as eddies go offline. - -- Seen: 2023-10-18 -- Happens all the time as gateleens error reporting is broken-by-desing. { action = "drop", file = "Forwarder", level = "WARN", @@ -109,116 +88,29 @@ function loadFilters( that ) -- Seen: 2023-10-18 prod { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" }, - -- Seen: 2023-10-18 prod - -- I guess this happens if an eddie tries to put his "backup.zip" via shaky connection. - { action = "drop", file = "FilePutter", level = "ERROR", - msgEquals = "Put file failed:\nio.vertx.core.VertxException: Connection was closed", }, - -- Seen: 2024-01-10 prod, 2023-10-18 prod - -- There are a whole bunch of related errors behind this filter which AFAICT all relate to shaky eddie connections. - { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+" - .." 
The timeout period of 30000ms has been exceeded while executing [DEGLOPSTU]+ /from.houston/%d+/eagle/[^ ]+ for server eddie%d+:7012$", }, - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/[^ ]+ Connection was closed$", }, - -- Seen: 2023-10-18 prod + -- Seen: 2024-04-10 prod, 2023-10-18 prod { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", }, - -- Seen: 2023-10-18 prod + -- Seen: 2024-04-10 prod, 2023-10-18 prod { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", }, - -- Seen: 2024-01-05 prod, 2023-10-18 prod + -- Seen: 2024-04-10 prod, 2024-01-05 prod, 2023-10-18 prod -- Reported: TODO link existing issue here { action = "drop", file = "HttpHeaderUtil", level = "ERROR", msgPattern = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/.*", }, --- -- Seen: 2023-10-18 prod --- -- Reported: --- { action = "drop", file = "Utils", level = "ERROR", --- msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Sync failed.\n" --- .."{\n" --- ..' "countIndexQueries" : 1,\n' --- ..' "countSentBytes" : 119,\n' --- ..' "countReceivedBytes" : 0,\n' --- ..' "countMultiGetRequests" : 0,\n' --- ..' "countPuts" : 0,\n' --- ..' "countDeletes" : 0,\n' --- ..' "durationSeconds" : 0.0,\n' --- ..' "iterationDepth" : 0\n' --- .."}", }, --- --- -- Seen: 2023-10-18 prod --- -- Reported: --- { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", --- stackPattern = "^" --- .."\tat org.swisspush.gateleen.core.http.LocalHttpClientRequest.connection.LocalHttpClientRequest.java:754. ~.gateleen.core.[0-9.]+.jar:..\n" --- .."\tat org.swisspush.gateleen.routing.Forwarder.1.lambda.handle.0.Forwarder.java:362. ~.gateleen.routing.[0-9.]+.jar:..\n" --- .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", --- }, - - -- Seen: 2024-01-05 prod, 2023-10-18 prod + -- Seen: 2024-01-05 prod -- Reported: { action = "drop", file = "Utils", level = "ERROR", - msgPattern = "^Exception occurred\nio.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address:" - .." __vertx.reply.[0-9]+, repliedAddress: nsync.reregister.sync/slarti.vehicle.setup.sync.[0-9]+", - }, - - -- Seen: 2024-01-05 prod, 2023-10-18 prod - -- Reported: - { action = "drop", file = "Utils", level = "ERROR", msgPattern = "^Exception occurred\n" - .."io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000.ms. for a reply. address: __vertx.reply.[0-9]+, repliedAddress: nsync.register.sync" }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - msgEquals = "Connection was closed\nio.vertx.core.VertxException: Connection was closed", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^..... ................................ http://bistr:8080/bistr/vending/accounting/v1/information/lastSessionEnd Connection was closed$", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "..... ................................ http://bob:8080/bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" - .." 
The timeout period of 30000ms has been exceeded while executing PUT /bob/vending/transaction/v1/systems/%d+/dates/[0-9-]+/transactions/%d+/start" - .." for server bob:8080", }, - - -- Seen: 2023-10-18 prod - { action = "drop", file = "ContextImpl", level = "ERROR", msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", - stackStartsWith = "" - .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.checkEnded(HttpClientResponseImpl.java:150) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.impl.HttpClientResponseImpl.endHandler(HttpClientResponseImpl.java:172) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler$7(Forwarder.java:476) ~[gateleen-routing-1.3.25.jar:?]\n" - .."\tat io.vertx.core.impl.future.FutureImpl$3.onSuccess(FutureImpl.java:141) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureBase.emitSuccess(FutureBase.java:60) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureImpl.addListener(FutureImpl.java:196) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.PromiseImpl.addListener(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.FutureImpl.onComplete(FutureImpl.java:164) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.impl.future.PromiseImpl.onComplete(PromiseImpl.java:23) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.impl.HttpClientRequestBase.response(HttpClientRequestBase.java:240) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat io.vertx.core.http.HttpClientRequest.send(HttpClientRequest.java:330) ~[vertx-core-4.2.1.jar:4.2.1]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder$1.lambda$handle$1(Forwarder.java:377) ~[gateleen-routing-1.3.25.jar:?]\n" - .."\tat org.swisspush.gateleen.core.http.BufferBridge.lambda$pump$0(BufferBridge.java:43) ~[gateleen-core-1.3.25.jar:?]\n" - .."\tat io.vertx.core.impl.AbstractContext.dispatch(AbstractContext.java:100) ~[vertx-core-4.2.1.jar:4.2.1]\n", - }, - - -- Seen: 2023-10-18 prod - -- TODO Push issue to my backlog to fix this. - { action = "drop", file = "ContextImpl", level = "ERROR", - msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", + msgPattern = "^Exception occurred\njava.lang.Exception: %(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync", stackPattern = "^" - .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:41. ~.gateleen.core.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.core.http.FastFailHttpServerResponse.drainHandler.FastFailHttpServerResponse.java:24. ~.gateleen.core.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.logging.LoggingWriteStream.drainHandler.LoggingWriteStream.java:73. ~.gateleen.logging.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:95. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" - .."\tat io.vertx.core.streams.impl.PumpImpl.stop.PumpImpl.java:39. ~.vertx.core.[0-9.]+.jar:[0-9.]+]\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.4.Forwarder.java:494. ~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat org.swisspush.gateleen.routing.Forwarder.lambda$getAsyncHttpClientResponseHandler.5.Forwarder.java:503. 
~.gateleen.routing.[0-9.]+.jar:..\n" - .."\tat io.vertx.core.impl.AbstractContext.dispatch.AbstractContext.java:100. ~.vertx.core.[0-9.]+.jar:[0-9.]+.\n", - }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^..... ................................ http://thought:8080/thought/vehicleoperation/recording/v1/events The timeout period of 60000ms has been exceeded while executing PUT /thought/vehicleoperation/recording/v1/events for server thought:8080$", - }, + .."%s-at org.swisspush.nsync.NSyncHandler.lambda.onPutClientSyncBody.%d+" + .."%(NSyncHandler.java:%d+%) ..nsync.-at io.vertx.core.impl.future.FutureImpl.%d+.onFailure%(FutureImpl.java:%d+%)" + ..".-" + .."Caused by: io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for a reply." + .." address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync" + }, -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+" @@ -237,120 +129,57 @@ function loadFilters( that ) -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod. { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", }, - -- TODO Thought timeout? Can happen. But how often is ok? - -- HINT: Occurred 15 times in 6 hrs (avg 1x per 24min) (2021-09-17_12:00 to 2021-09-17_18:00) - -- Seen 2022-06-20, 2022-08-30 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://[aghilmostuwy]+:8080/[aghilmostuwy]+/vehicleoperation/recording/v1/.+ Timeout", }, - - -- [SDCISA-9572] pag - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://[8acgilmnpsvwy]+:[78]080/[_aegilmopstwy]+/.+ Connection was closed", }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "t.ch:7022/brox/from/vehicles/.+Connection refused: ", }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = " http://%w+.pnet.ch:7022/brox/info Connection refused: %w+.pnet.ch/[%d.]+:7022" }, - - -- TODO Analyze - -- Observed 20014 times within 6 hrs (~1/sec) (2021-09-17_12:00 to 2021-09-17_18:00) - -- HINT: Eddie connections issues also have around 20000 occurrences. Maybe related? - -- Seen: 2021-09-17 - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://eddie%d+:7012/from.houston/[^/]+/eagle/[^ ]+ Response already written. Not sure about the" - .." state. Closing server connection for stability reason", }, - - -- TODO Analyze - -- HINT: Occurred 1538 times in 6 hrs (~ 1x per 15sec) (2021-09-17_12:00 to 2021-09-17_18:00) - { action = "drop", file = "Forwarder", level = "WARN", - msgPattern = "Failed to '[^ ]+ /from%-houston/%d+/eagle/.+'\n.+VertxException: Connection was closed", }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://eddie%d+:7012/from%-houston/%d+/eagle/.+ Connection was closed", }, - - -- TODO Analyze - -- Seen: 2021-09-17, ..., 2022-06-20 - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://pag:8080/pag/user/information/v1/directory/sync/request Timeout", }, - - -- Seen 2021-10-25, 2022-08-30 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://[8acgilmnpsvwy]+:8080/[_aegilmopstwy]+/.+ Response already written. Not sure about the" - .." state. 
Closing server connection for stability reason", }, - - -- TODO Analyze. - -- Seen 2021-09-17, 2022-06-20 - { action = "drop", file = "BisectClient", level = "WARN", - msgPattern = "statusCode=503 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, - -- Seen 2022-06-20 prod - { action = "drop", file = "BisectClient", level = "WARN", - msgPattern = "statusCode=504 received for POST /houston/routes/vehicles/%d+/eagle/nsync/v1/query%-index", }, - -- TODO rm filter when fixed - -- Reported: SDCISA-9573 - -- Seen: 2022-08-30 prod, 2022-06-20, 2021-09-17 - { action = "drop", file = "BisectClient", level = "WARN", - msgPattern = "Index id=slarti%-vehicle%-setup%-sync%-%d+ rootPath=/houston/from/vehicles/%d+/vehicle/setup/v1 size=%d+ not %(nor no more%) ready. Aborting BisectClient", }, - -- Reported: SDCISA-9574 -- TODO rm when resolved -- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod, { action = "drop", file = "Utils", level = "ERROR", - msgPattern = "Exception occurred\n%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, - - -- TODO Analyze - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "http://preflux:8080/preflux/data/preflux/rollout/hosts/eddie%d+/instances/default/situation Timeout", }, - - -- TODO Analyze. - -- Seen 2022-08-30 prod, 2022-06-20, 2021-09-17 - { action = "drop", file = "RedisQues", level = "WARN", - msgPattern = "Registration for queue .+ has changed to null", }, - - -- Reported: SDCISA-10973 - -- Seen: 2023-10-18 prod. - { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - msgPattern = "The timeout period of 30000ms has been exceeded while executing PUT /houston/vehicles/[0-9]+" - .."/vehicle/backup/v1/executions/[0-9]+/backup.zip for server localhost:9089", }, - - -- Seen 2024-01-10 prod - { action = "drop", file = "HttpClientRequestImpl", level = "ERROR", - msgPattern = "The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012" }, - - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "[%a-z0-9]+ [a-z0-9]+ http://eddie.....:7012/from%-houston/[^/]+/eagle/nsync/v1/push/trillian" - .."%-phonebooks%-affiliated%-planning%-area%-[^-]+%-vehicles The timeout period of 30000ms has been" - .." exceeded while executing POST /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks" - .."%-affiliated%-planning%-area%-[^%-]+-vehicles for server eddie.....:7012", }, - - -- Reported: SDCISA-9578 - -- TODO rm when fixed - -- Seen 2022-08-30 prod, 2022-06-20 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - .." Connection reset by peer", }, - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - .." Connection was closed", }, - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/navigation/location/v1/position/collected" - .." Response already written. Not sure about the state. Closing server connection for stability reason", }, - - -- TODO analyze - -- Seen 2024-03-20 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = "^..... 
[a-z0-9]+ http://vhfspa1.pnet.ch:7096/timetable/information/v1/tripinfo%?id%=I%-" }, - - -- TODO analyze - -- Seen 2022-06-20 prod - { action = "drop", file = "Forwarder", level = "ERROR", - msgPattern = " http://vhfspa1.pnet.ch:7022/brox/from/vehicles/[^/]+/timetable/private/v1/trip/state/%w+.xml Connection was closed", }, + msgPattern = "%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", }, -- TODO analyze -- Seen 2024-03-20 prod { action = "drop", file = "ContextImpl", level = "ERROR", msgPattern = "^Unhandled exception\njava.lang.IllegalStateException: Response head already sent", }, + -- Seen: 2024-04-10 prod. + { action = "drop", level = "ERROR", file = "HttpClientRequestImpl", + msgEquals = "Connection reset by peer\njava.io.IOException: Connection reset by peer", + stackPattern = "^" + .."%s-at sun.nio.ch.FileDispatcherImpl.read0%(.-\n" + .."%s-at sun.nio.ch.SocketDispatcher.read%(.-\n" + .."%s-at sun.nio.ch.IOUtil.readIntoNativeBuffer%(.-\n" + .."%s-at sun.nio.ch.IOUtil.read%(.-\n" + .."%s-at sun.nio.ch.IOUtil.read%(.-\n" + .."%s-at sun.nio.ch.SocketChannelImpl.read%(.-\n" + .."%s-at io.netty.buffer.PooledByteBuf.setBytes%(.-\n" + .."%s-at io.netty.buffer.AbstractByteBuf.writeBytes%(.-\n" + .."%s-at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes%(.-\n" + .."%s-at io.netty.channel.nio.AbstractNioByteChannel.NioByteUnsafe.read%(.-\n" + .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKey%(.-\n" + .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized%(.-\n" + .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeys%(.-\n" + .."%s-at io.netty.channel.nio.NioEventLoop.run%(.-\n" + .."%s-at io.netty.util.concurrent.SingleThreadEventExecutor.%d+.run%(.-\n" + .."%s-at io.netty.util.internal.ThreadExecutorMap.%d+.run%(.-\n" + .."%s-at io.netty.util.concurrent.FastThreadLocalRunnable.run%(.-\n" + .."%s-at java.lang.Thread.run%(.-", }, + + -- Seen: 2024-04-10 prod. + { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null", + stackPattern = "^" + ..".-io.vertx.-%.HttpClientResponseImpl.checkEnded%(.-\n" + ..".-io.vertx.-%.HttpClientResponseImpl.endHandler%(.-\n" + ..".-gateleen.routing.Forwarder.-\n", }, + + -- Seen: 2024-04-10 prod. + -- TODO get rid of this silly base class. + { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", }, + + -- Seen: 2024-04-10 prod. + -- TODO get rid of this silly base class. 
+ { action = "drop", file = "ContextImpl", level = "ERROR", + msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", }, } end @@ -361,7 +190,7 @@ function initFilters( that ) local descr = that.filters[iF] local beforeDate = descr.beforeDate and normalizeIsoDateTime(descr.beforeDate) local afterDate = descr.afterDate and normalizeIsoDateTime(descr.afterDate) - local file, level, msgPattern = descr.file, descr.level, descr.msgPattern + local file, level, msgPattern, msgEquals = descr.file, descr.level, descr.msgPattern, descr.msgEquals local rawPattern, stackPattern = descr.rawPattern, descr.stackPattern local stackStartsWith = descr.stackStartsWith local filter = { action = descr.action, matches = false, } -- cgit v1.1 From 6109000c870b6464c2cb198598a89d73fbbd3c52 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 11 Apr 2024 16:13:53 +0200 Subject: Continued a bit on FindFullDisks.c --- src/main/c/paisa-fleet/FindFullDisks.c | 154 ++++++++++++++++++++++----------- 1 file changed, 105 insertions(+), 49 deletions(-) diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c index 50f7b90..492f983 100644 --- a/src/main/c/paisa-fleet/FindFullDisks.c +++ b/src/main/c/paisa-fleet/FindFullDisks.c @@ -1,11 +1,25 @@ -/* TODO move compile cmd somewhere better maybe? +#if 0 -true \ - && CFLAGS="-Wall -Werror -pedantic -ggdb -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function" \ - && ${CC:?} -o build/bin/findfulldisks $CFLAGS src/main/c/paisa-fleet/FindFullDisks.c -Isrc/main/c -Iimport/include -Limport/lib -lgarbage -lpthread \ +true `# configure FindFullDisks for NORMAL systems` \ + && CC=gcc \ + && MKDIR_P="mkdir -p" \ + && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \ + && LDFLAGS="-Wl,-dn,-lgarbage,-dy,-lpthread,-lws2_w32,-Limport/lib" \ && true -*/ +true `# configure FindFullDisks for BROKEN systems` \ + && CC=x86_64-w64-mingw32-gcc \ + && MKDIR_P="mkdir -p" \ + && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \ + && LDFLAGS="-Wl,-dn,-lgarbage,-dy,-lws2_32,-Limport/lib" \ + && true + +true `# make FindFullDisks` \ + && ${MKDIR_P:?} build/bin \ + && ${CC:?} -o build/bin/findfulldisks $CFLAGS src/main/c/paisa-fleet/FindFullDisks.c $LDFLAGS \ + && true + +#endif #include #include @@ -14,6 +28,15 @@ true \ #include "Garbage.h" +#if !NDEBUG +# define REGISTER register +# define LOGDBG(...) fprintf(stderr, __VA_ARGS__) +#else +# define REGISTER +# define LOGDBG(...) +#endif +#define LOGERR(...) fprintf(stderr, __VA_ARGS__) + typedef struct FindFullDisks FindFullDisks; typedef struct Device Device; @@ -22,10 +45,13 @@ typedef struct Device Device; struct FindFullDisks { char *sshUser; int sshPort; - struct GarbageEnv **garb; + int maxParallel, numInProgress; + struct GarbageEnv **env; struct Garbage_Process **child; int devices_len; Device *devices; + int iDevice; /* Next device to be triggered. 
*/ + int exitCode; }; @@ -36,11 +62,15 @@ struct Device { }; +/*BEG fwd decls*/ +static void beginNextDevice( void* ); +/*END fwd decls*/ + + static void Child_onStdout( const char*buf, int buf_len, void*cls ){ - //struct FindFullDisks*const app = cls; - //fprintf(stderr, "[TRACE] %s(buf, %d, cls)\n", __func__, buf_len); + //FindFullDisks*const app = cls; if( buf_len > 0 ){ /*another chunk*/ - fprintf(stdout, "%.*s", buf_len, buf); + printf("%.*s", buf_len, buf); }else{ /*EOF*/ assert(buf_len == 0); } @@ -48,15 +78,19 @@ static void Child_onStdout( const char*buf, int buf_len, void*cls ){ static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){ - //struct FindFullDisks*const app = cls; - fprintf(stderr, "[TRACE] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum); + FindFullDisks*const app = cls; + LOGDBG("[TRACE] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum); + assert(app->numInProgress > 0); + app->numInProgress -= 1; + //LOGDBG("[DEBUG] numInProgress decremented is now %d\n", app->numInProgress); + (*app->env)->enqueBlocking(app->env, beginNextDevice, app); } -static void visitDevice( struct FindFullDisks*app, const Device*device ){ +static void visitDevice( FindFullDisks*app, const Device*device ){ assert(device != NULL); - fprintf(stderr, "[TRACE] %s \"%s\" (behind \"%s\")\n", __func__, - device->hostname, device->eddieName); + assert(device < app->devices + app->devices_len); + LOGERR("\n[INFO ] %s \"%s\" (behind \"%s\")\n", __func__, device->hostname, device->eddieName); int err; char eddieCmd[2048]; err = snprintf(eddieCmd, sizeof eddieCmd, "true" @@ -96,10 +130,10 @@ static void visitDevice( struct FindFullDisks*app, const Device*device ){ "--", "sh", "-c", eddieCmd, NULL }; - //fprintf(stderr, "CMDLINE:"); - //for( int i = 0 ; childArgv[i] != NULL ; ++i ) fprintf(stderr, " \"%s\"", childArgv[i]); - //fprintf(stderr, "\n\n"); - app->child = (*app->garb)->newProcess(app->garb, &(struct Garbage_Process_Mentor){ + //LOGDBG("CMDLINE:"); + //for( int i = 0 ; childArgv[i] != NULL ; ++i ) LOGDBG(" \"%s\"", childArgv[i]); + //LOGDBG("\n\n"); + app->child = (*app->env)->newProcess(app->env, &(struct Garbage_Process_Mentor){ .cls = app, .usePathSearch = !0, .argv = childArgv, @@ -112,53 +146,75 @@ static void visitDevice( struct FindFullDisks*app, const Device*device ){ } -static void startApp( void*cls ){ - struct FindFullDisks *app = cls; - for( int i = 0 ; i < app->devices_len ; ++i ){ - visitDevice(app, app->devices + i); +static void beginNextDevice( void*cls ){ + FindFullDisks *app = cls; +maybeBeginAnotherOne: + if( app->numInProgress >= app->maxParallel ){ + LOGDBG("[DEBUG] Already %d/%d in progress. 
Do NOT trigger more for now.\n",
+            app->numInProgress, app->maxParallel);
+        goto endFn;
+    }
+    if( app->iDevice >= app->devices_len ){
+        LOGDBG("[INFO ] All %d devices started\n", app->iDevice);
+        goto endFn;
+    }
+    assert(app->iDevice >= 0 && app->iDevice < INT_MAX);
+    app->iDevice += 1;
+    assert(app->numInProgress >= 0 && app->numInProgress < INT_MAX);
+    app->numInProgress += 1;
+    visitDevice(app, app->devices + app->iDevice - 1);
+    goto maybeBeginAnotherOne;
+endFn:;
 }
 
 
 static void setupExampleDevices( FindFullDisks*app ){
-    app->devices_len = 1;
-    app->devices = realloc(NULL, app->devices_len*sizeof*app->devices);
-    assert(app->devices != NULL || !"ENOMEM");
-    /**/
-    strcpy(app->devices[0].hostname, "fook-12345");
-    strcpy(app->devices[0].eddieName, "eddie09815");
-    strcpy(app->devices[0].lastSeen, "2023-12-31T23:59:59");
+    #define DEVICES_CAP 3
+    app->devices_len = 0;
+    app->devices = malloc(DEVICES_CAP*sizeof*app->devices);
+    assert(app->devices != NULL || !"malloc fail");
     /**/
-//  strcpy(app->devices[1].hostname, "fook-67890");
-//  strcpy(app->devices[1].eddieName, "eddie12345");
-//  strcpy(app->devices[1].lastSeen, "2023-12-31T23:42:42");
+//  strcpy(app->devices[app->devices_len].eddieName, "eddie09815");
+//  strcpy(app->devices[app->devices_len].hostname, "fook-12345");
+//  strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:59");
+//  app->devices_len += 1;  assert(app->devices_len < DEVICES_CAP);
+//  /**/
-//  strcpy(app->devices[2].hostname, "lunkwill-12345");
-//  strcpy(app->devices[2].eddieName, "eddie09845");
-//  strcpy(app->devices[2].lastSeen, "2023-12-31T23:59:42");
+//  strcpy(app->devices[app->devices_len].eddieName, "eddie12345");
+//  strcpy(app->devices[app->devices_len].hostname, "fook-67890");
+//  strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:42:42");
+//  app->devices_len += 1;  assert(app->devices_len < DEVICES_CAP);
+//  /**/
+    strcpy(app->devices[app->devices_len].eddieName, "eddie09845");
+    strcpy(app->devices[app->devices_len].hostname, "lunkwill-0005b7ec98a9");
+    strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
+    app->devices_len += 1;  assert(app->devices_len < DEVICES_CAP);
+    /**/
+    strcpy(app->devices[app->devices_len].eddieName, "eddie00002");
+    strcpy(app->devices[app->devices_len].hostname, "lunkwill-FACEBOOKBABE");
+    strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
+    app->devices_len += 1;  assert(app->devices_len < DEVICES_CAP);
+    /**/
+    #undef DEVICES_CAP
 }
 
 
 int main( int argc, char**argv ){
     static union{ void*align; char space[SIZEOF_struct_GarbageEnv]; } garbMemory;
-    FindFullDisks app = {
-        .sshUser = "brünzli",
-        .sshPort = 22,
-        .garb = NULL,
-        .child = NULL,
-        .devices_len = 0,
-        .devices = NULL,
-    };
-    setupExampleDevices(&app);
-    app.garb = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){
+    FindFullDisks app = {0};  assert((void*)0 == NULL);
+    #define app (&app)
+    app->sshUser = "isa" ;// "brünzli";
+    app->sshPort = 7022 ;// 22;
+    app->maxParallel = 1;
+    setupExampleDevices(app);
+    app->env = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){
         .memBlockToUse = &garbMemory,
         .memBlockToUse_sz = sizeof garbMemory,
     });
-    assert(app.garb != NULL);
-    (*app.garb)->enqueBlocking(app.garb, startApp, &app);
-    (*app.garb)->runUntilDone(app.garb);
-    return 0;
+    assert(app->env != NULL);
+    (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
+    (*app->env)->runUntilDone(app->env);
+    return !!app->exitCode;
+    #undef app
 }
-- 
cgit v1.1


From 98cac05a8c3c26eff81d89e9980d376cda00f194 Mon Sep 17 00:00:00
From 98cac05a8c3c26eff81d89e9980d376cda00f194 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Thu, 11 Apr 2024 17:04:33 +0200
Subject: FindFullDisks.c add help.

---
 src/main/c/paisa-fleet/FindFullDisks.c | 73 +++++++++++++++++++++++++++++++---
 1 file changed, 67 insertions(+), 6 deletions(-)

diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c
index 492f983..6029233 100644
--- a/src/main/c/paisa-fleet/FindFullDisks.c
+++ b/src/main/c/paisa-fleet/FindFullDisks.c
@@ -28,6 +28,8 @@ true `# make FindFullDisks` \
 #include "Garbage.h"
 
+#define FLG_isHelp (1<<0)
+
 #if !NDEBUG
 #  define REGISTER register
 #  define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
@@ -43,7 +45,8 @@ typedef struct Device Device;
 
 
 struct FindFullDisks {
-    char *sshUser;
+    int flg;
+    const char *sshUser;
     int sshPort;
     int maxParallel, numInProgress;
     struct GarbageEnv **env;
@@ -67,6 +70,54 @@ static void beginNextDevice( void* );
 /*END fwd decls*/
 
 
+static void printHelp( void ){
+    printf("%s%s%s", "  \n"
+        "  ", strrchr(__FILE__,'/')+1, "\n"
+        "  \n"
+        "  Expected format on stdin is:\n"
+        "  \n"
+        "    eddie00042 lunkwill-ABBABEAFABBA  \n"
+        "    ...\n"
+        "  \n"
+        "  Options:\n"
+        "  \n"
+        "    --sshUser \n"
+        "  \n"
+        "    --sshPort \n"
+        "  \n");
+}
+
+
+static int parseArgs( int argc, char**argv, FindFullDisks*app ){
+    int iA = 1;
+    app->sshUser = NULL;
+    app->sshPort = 22;
+    app->maxParallel = 1;
+nextArg:;
+    const char *arg = argv[iA++];
+    if( arg == NULL ) goto validateArgs;
+    if( !strcmp(arg, "--help")){
+        app->flg |= FLG_isHelp; return 0;
+    }else if( !strcmp(arg, "--sshUser")){
+        arg = argv[iA++];
+        if( arg == NULL ){ LOGERR("EINVAL: Arg --sshUser needs value\n"); return -1; }
+        app->sshUser = arg;
+    }else if( !strcmp(arg, "--sshPort")){
+        arg = argv[iA++];
+        if( arg == NULL ){ LOGERR("EINVAL: Arg --sshPort needs value\n"); return -1; }
+        errno = 0;
+        app->sshPort = strtol(arg, NULL, 0);
+        if( errno ){ LOGERR("EINVAL: --sshPort %s\n", arg); return -1; }
+    }else{
+        LOGERR("EINVAL: %s\n", arg);
+    }
+    goto nextArg;
+validateArgs:;
+    if( app->sshUser == NULL ){ LOGERR("EINVAL: Arg --sshUser missing\n"); return -1; }
+    return 0;
+}
+
+
 static void Child_onStdout( const char*buf, int buf_len, void*cls ){
     //FindFullDisks*const app = cls;
     if( buf_len > 0 ){ /*another chunk*/
@@ -169,7 +220,7 @@
 
 static void setupExampleDevices( FindFullDisks*app ){
-    #define DEVICES_CAP 3
+    #define DEVICES_CAP 42
     app->devices_len = 0;
     app->devices = malloc(DEVICES_CAP*sizeof*app->devices);
     assert(app->devices != NULL || !"malloc fail");
@@ -185,6 +236,11 @@ static void setupExampleDevices( FindFullDisks*app ){
 //    app->devices_len += 1; assert(app->devices_len < DEVICES_CAP);
 //    /**/
     strcpy(app->devices[app->devices_len].eddieName, "eddie09845");
+    strcpy(app->devices[app->devices_len].hostname, "fook");
+    strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
+    app->devices_len += 1; assert(app->devices_len < DEVICES_CAP);
+    /**/
+    strcpy(app->devices[app->devices_len].eddieName, "eddie09845");
     strcpy(app->devices[app->devices_len].hostname, "lunkwill-0005b7ec98a9");
     strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
     app->devices_len += 1; assert(app->devices_len < DEVICES_CAP);
@@ -194,6 +250,11 @@
     strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
     app->devices_len += 1; assert(app->devices_len < DEVICES_CAP);
     /**/
+    strcpy(app->devices[app->devices_len].eddieName, "eddie00002");
+    strcpy(app->devices[app->devices_len].hostname, "fook");
+    strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42");
+    app->devices_len += 1; assert(app->devices_len < DEVICES_CAP);
+    /**/
     #undef DEVICES_CAP
 }
 
@@ -202,10 +263,9 @@ int main( int argc, char**argv ){
     static union{ void*align; char space[SIZEOF_struct_GarbageEnv]; } garbMemory;
     FindFullDisks app = {0}; assert((void*)0 == NULL);
     #define app (&app)
-    app->sshUser = "isa" ;// "brünzli";
-    app->sshPort = 7022 ;// 22;
-    app->maxParallel = 1;
-    setupExampleDevices(app);
+    if( parseArgs(argc, argv, app) ){ app->exitCode = -1; goto endFn; }
+    if( app->flg & FLG_isHelp ){ printHelp(); goto endFn; }
+    //setupExampleDevices(app);
     app->env = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){
         .memBlockToUse = &garbMemory,
         .memBlockToUse_sz = sizeof garbMemory,
@@ -213,6 +273,7 @@ int main( int argc, char**argv ){
     assert(app->env != NULL);
     (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
     (*app->env)->runUntilDone(app->env);
+endFn:
     return !!app->exitCode;
     #undef app
 }
-- cgit v1.1
From 49eb68fcf6e5ed1ff841a8bf5bf5978a4d67f412 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Fri, 12 Apr 2024 11:59:49 +0200
Subject: Add article draft

---
 .../vertx-promise-errorhandling.txt                | 199 +++++++++++++++++++++
 1 file changed, 199 insertions(+)
 create mode 100644 doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt

diff --git a/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt b/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
new file mode 100644
index 0000000..430740e
--- /dev/null
+++ b/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
@@ -0,0 +1,199 @@
+
+How to do error handling with java vertx Promises
+
+Example library
+```java
+  public class ExampleLib {
+      public Promise fetchPerson();
+      public Promise fetchPhone(String personId);
+  }
+```
+
+The way to go (HINT still suffers from callback-hell, use method refs):
+
+```java
+  exampleLib.fetchPerson().onComplete( personEv -> {
+      if( personEv.failed() ){
+          log.error("TODO error handling", new UnsupportedOperationException(personEv.cause()));
+          return;
+      }
+      Person person = personEv.result();
+      exampleLib.fetchPhone().onComplete( phoneEv -> {
+          if( phoneEv.failed() ){
+              log.error("TODO error handling", new UnsupportedOperationException(phoneEv.cause()));
+              return;
+          }
+          Phone phone = phoneEv.result();
+          // 'person' and 'phone' are now ready to use.
+          // ....
+      });
+  });
+```
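+
+A sketch of the method-ref variant the HINT above alludes to (the handler
+names are made up; the error handling stays exactly as in the lambda version):
+
+```java
+  // Each step becomes a named method, so the nesting goes away:
+  exampleLib.fetchPerson().onComplete(this::onPersonFetched);
+
+  void onPersonFetched( AsyncResult<Person> personEv ){
+      if( personEv.failed() ){
+          log.error("TODO error handling", new UnsupportedOperationException(personEv.cause()));
+          return;
+      }
+      exampleLib.fetchPhone(personEv.result().id).onComplete(this::onPhoneFetched);
+  }
+```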
+
+
+Why not use onSuccess().onFailure()?
+
+What I've found often in PRODUCTION code is the following:
+
+```java
+  exampleLib.fetchPerson().onSuccess( person -> exampleLib.fetchPhone(person.id, phone -> {
+      log.info("yayy! :) we got {} and {}", person, phone);
+  })).onFailure( ex -> {
+      log.error("whops :(", ex);
+  });
+```
+
+Then I hear stuff like "Yeah, this is cool. It is so short and concise. This
+framework is great". BUT HERE'S THE CATCH: This code has a major flaw. Do you
+spot it? It is a VERY simple example, so we should spot such a major flaw
+immediately. Just imagine a somewhat more realistic example with tons more
+code around it, and it will become even harder to ever spot what is wrong here.
+
+Couldn't find the flaw yet? To make it more obvious I'll rewrite the code as
+good old synchronous, blocking, stone-age code, removing all that fancy
+framework clutter. This will help us to SEE immediately what the problem is:
+
+```java
+  try{
+      Person person = exampleLib.fetchPerson();
+      try{
+          Phone phone = exampleLib.fetchPhone();
+      }catch( Throwable ex ){
+          return; // HINT: This line should be your eye-catcher.
+      }
+      log.info("yayy! :) we got {} and {}", person, phone);
+  }catch( Throwable ex ){
+      log.error("whops :(", ex);
+  }
+```
+
+Take a look at the inner catch block. Now every serious dev should see something
+terrifying. It catches EVERY kind of Throwable and JUST COMPLETELY IGNORES
+THEM! This is exactly what the previous code does. The problem is that even
+experienced developers have a hard time spotting this. I say this because in
+nearly every pull-request I have to review, I spot this issue dozens of times,
+every few lines of code. This is terrifying when hunting bugs later.
+
+
+Why not just use a global error handler?
+
+Lazy devs may argue with DRY, that we should just register a global handler and
+then just ignore all error handling everywhere else.
+
+I think it is a good idea to register the global error handler. BUT we have to
+consider the following:
+
+- If we're writing a library, we MUST NOT steal the error handler. This is the application's job.
+- We still MUST have error handlers for every individual case.
+
+Why not register it from a lib? Because that error handler is GLOBAL to our
+vertx context, and we would make it hard to use our library, because the
+application cannot rely on this feature anymore, as all those fancy
+dependencies are going to fight each other by overriding that handler with
+their own. Which makes the catch-all error handler pretty useless for the
+application.
+
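+For completeness, registering such an application-level catch-all is a
+one-liner (a sketch; it assumes a vertx instance and a logger are in scope):
+
+```java
+  // Application-level last resort. A library MUST NOT do this.
+  vertx.exceptionHandler( ex -> log.error("Unhandled exception on vertx context", ex) );
+```
+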
+So why not rely on the application setting this handler? Good APIs are
+easy to use right and hard to use wrong. Silently putting this responsibility
+to the application as an option is an example of how to provoke wrong usage
+of the library. So if a library does this, it just degrades the quality of the
+library's API. And libraries with bad APIs should be avoided if you would like
+to spare your future self unnecessary headache during maintenance later.
+
+So why do we still need individual handlers everywhere, even with a global
+catch-all handler registered? The only thing such a global handler is good for
+is to make visible that error handling is missing SOMEWHERE. And this is the
+problem: the stack traces likely will NOT contain the location where you
+have to apply a fix. You still have to guess where it could be. Here is example
+code which needs to be fixed, but we do NOT yet know that, because we only see
+the exception in the log and do NOT know where it should be handled:
+
+```java
+  public Promise getOrCreatePhone(Id personId, Phone addIfMissing){
+      var onDone = Promise.promise();
+      exampleLib.fetchPerson(personId).onSuccess( person -> exampleLib.fetchPhone(person.id).onSuccess( phone -> {
+          onDone.complete(phone);
+      })).onFailure(onDone::fail);
+      return onDone.future();
+  }
+```
+
+Whoops, this code accidentally misses that it should assign the new phone to the
+person if there is no phone assigned yet. Unluckily we do NOT know this yet, as
+this code is in production and we just found this error in our logs, maybe in
+combination with some customers complaining that they get HTTP response
+timeouts without any further details. With luck we at some point randomly stumble
+over this error. Nothing indicates an HTTP 500 nor any timeout. It just looks
+unrelated to what we're searching for. Exactly like all the other 100'000 useless
+error logs in there.
+
+```
+  foo.bar.HttpNotFoundException: 404
+    at HttpLib.doGet0()
+    at HttpLib.doGet()
+    at HttpLib.get()
+    at EventLoop.runTask()
+    at EventLoop.nextTask()
+    at Thread.run()
+```
+
+So now how would you like to find out where this stack is coming from? There is
+NOT EVEN ONE stacktrace line pointing to any of our application code at all. So
+the only option left is to GUESS where it could come from. And both the
+message and the stacktrace are pretty useless.
+
+Therefore: Just add this fu**ing error handling. It is not that hard:
+
+```java
+  public Promise getOrCreatePhone(Id personId, Phone addIfMissing){
+      var onDone = Promise.promise();
+      exampleLib.fetchPerson(personId, personEv -> {
+          if( personEv.failed() ){
+              onDone.fail(new UnsupportedOperationException("TODO error handling", personEv.cause()));
+              return;
+          }
+          Person person = personEv.result();
+          exampleLib.fetchPhone(person.id, phoneEv -> {
+              if( phoneEv.failed() ){
+                  onDone.fail(new UnsupportedOperationException("TODO error handling", phoneEv.cause()));
+                  return;
+              }
+              onDone.complete(phoneEv.result());
+          });
+      });
+      return onDone.future();
+  }
+```
+
+Now the stack will look somewhat like below, where the very first line points
+you straight to where the problem has to be fixed.
+
+```
+  UnsupportedOperationException: TODO error handling
+    at getOrCreatePhone$lambda$1() line 42
+    at EventLoop.runTask()
+    at EventLoop.nextTask()
+    at Thread.run()
+  Caused by: foo.bar.HttpNotFoundException: 404
+    at HttpLib.doGet0()
+    at HttpLib.doGet()
+    at HttpLib.get()
+    at EventLoop.runTask()
+    at EventLoop.nextTask()
+    at Thread.run()
+```
+
+For those who now start yelling "urgh, this code is soo long": here's your choice:
+
+- Write short code which does NOT TELL YOU WHAT IS HAPPENING and gives bugs
+  many places to play hide-and-seek. Aka happy maintenance nightmare.
+- Write some more code which makes it hard for bugs to hide.
+
+
+Initialized: 2024-04-12
+LastModified: 2024-04-12
+Published:
+Author and Copyright: Andreas Fankhauser
+
-- cgit v1.1
From cb1664d8b8dbfece9b185c1d414f16ace44908db Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Fri, 12 Apr 2024 19:25:10 +0200
Subject: Read devices to visit from stdin CSV.

---
 .../vertx-promise-errorhandling.txt                |   2 +-
 doc/note/links/links.txt                           |   3 +
 doc/note/openshift/dbg-mem-issues.txt              |   3 +-
 src/main/c/paisa-fleet/FindFullDisks.c             | 144 +++++++++++++--------
 4 files changed, 99 insertions(+), 53 deletions(-)

diff --git a/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt b/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
index 430740e..107563e 100644
--- a/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
+++ b/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
@@ -187,7 +187,7 @@ you straight to where the problem has to be fixed.
 For those who now start yelling "urgh, this code is soo long": here's your choice:
 
 - Write short code which does NOT TELL YOU WHAT IS HAPPENING and gives bugs
-  many places to play hide-and-seek. Aka happy maintenance nightmare.
+  many places to play hide-and-seek with you. Aka happy maintenance nightmare.
 - Write some more code which makes it hard for bugs to hide.
 
diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 78e045a..53ea66e 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -585,3 +585,6 @@ Links (Aka argument amplifiers) - [public service announcement](https://xkcd.com/1179/) - [3 simple rules](https://dev.to/corykeane/3-simple-rules-for-effectively-handling-dates-and-timezones-1pe0) +## (TODO put to a better place in here) +- [how to handle vertx promise fail/errors properly](https://github.com/swisspost/vertx-redisques/pull/164#discussion_r1562105007) + diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 16e574d..59afa2f 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -112,7 +112,8 @@ true `# Get made heap dump` \ && true true `# Probe` \ - && logFile="readyness-probe.log" \ + && stage=prod \ + && logFile="readyness-houston-${stage:?}-$(date -u +%Y%m%d-%H%M%SZ).log" \ && printf 'c; when ;rspCode; connectSec; trsfSec; totlSec; curlExit\n' | tee -a "${logFile:?}" \ && while true; do true \ && printf 'r;%s;%7d;%11.3f;%8.3f;%8.3f;%9d\n' \ diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c index 6029233..8ac39f1 100644 --- a/src/main/c/paisa-fleet/FindFullDisks.c +++ b/src/main/c/paisa-fleet/FindFullDisks.c @@ -4,14 +4,14 @@ true `# configure FindFullDisks for NORMAL systems` \ && CC=gcc \ && MKDIR_P="mkdir -p" \ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \ - && LDFLAGS="-Wl,-dn,-lgarbage,-dy,-lpthread,-lws2_w32,-Limport/lib" \ + && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lpthread,-lws2_w32,-Limport/lib" \ && true true `# configure FindFullDisks for BROKEN systems` \ && CC=x86_64-w64-mingw32-gcc \ && MKDIR_P="mkdir -p" \ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \ - && LDFLAGS="-Wl,-dn,-lgarbage,-dy,-lws2_32,-Limport/lib" \ + && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lws2_32,-Limport/lib" \ && true true `# make FindFullDisks` \ @@ -50,9 +50,12 @@ struct FindFullDisks { int sshPort; int maxParallel, numInProgress; struct GarbageEnv **env; + struct Garbage_CsvIStream **csvSrc; struct Garbage_Process **child; - int devices_len; + char *inBuf; + int inBuf_cap, inBuf_len; Device *devices; + int devices_cap, devices_cnt; int iDevice; /* Next device to be triggered. 
*/ int exitCode; }; @@ -61,12 +64,12 @@ struct FindFullDisks { struct Device { char hostname[sizeof"lunkwill-0123456789AB"]; char eddieName[sizeof"eddie12345"]; - char lastSeen[sizeof"2023-12-31T23:59:59"]; }; /*BEG fwd decls*/ static void beginNextDevice( void* ); +static void feedNextChunkFromStdinToCsvParser( void* ); /*END fwd decls*/ @@ -118,6 +121,9 @@ validateArgs:; } +static void no_op( void*_ ){} + + static void Child_onStdout( const char*buf, int buf_len, void*cls ){ //FindFullDisks*const app = cls; if( buf_len > 0 ){ /*another chunk*/ @@ -140,7 +146,7 @@ static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){ static void visitDevice( FindFullDisks*app, const Device*device ){ assert(device != NULL); - assert(device < app->devices + app->devices_len); + assert(device < app->devices + app->devices_cnt); LOGERR("\n[INFO ] %s \"%s\" (behind \"%s\")\n", __func__, device->hostname, device->eddieName); int err; char eddieCmd[2048]; @@ -205,8 +211,8 @@ maybeBeginAnotherOne: app->numInProgress, app->maxParallel); goto endFn; } - if( app->iDevice >= app->devices_len ){ - LOGDBG("[INFO ] All %d devices started\n", app->iDevice); + if( app->iDevice >= app->devices_cnt ){ + LOGDBG("[INFO ] Work on %d devices triggered. No more devices to trigger.\n", app->iDevice); goto endFn; } assert(app->iDevice >= 0 && app->iDevice < INT_MAX); @@ -219,59 +225,95 @@ endFn:; } -static void setupExampleDevices( FindFullDisks*app ){ - #define DEVICES_CAP 42 - app->devices_len = 0; - app->devices = malloc(DEVICES_CAP*sizeof*app->devices); - assert(app->devices != NULL || !"malloc fail"); - /**/ -// strcpy(app->devices[app->devices_len].eddieName, "eddie09815"); -// strcpy(app->devices[app->devices_len].hostname, "fook-12345"); -// strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:59"); -// app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); -// /**/ -// strcpy(app->devices[app->devices_len].eddieName, "eddie12345"); -// strcpy(app->devices[app->devices_len].hostname, "fook-67890"); -// strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:42:42"); -// app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); -// /**/ - strcpy(app->devices[app->devices_len].eddieName, "eddie09845"); - strcpy(app->devices[app->devices_len].hostname, "fook"); - strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42"); - app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); - /**/ - strcpy(app->devices[app->devices_len].eddieName, "eddie09845"); - strcpy(app->devices[app->devices_len].hostname, "lunkwill-0005b7ec98a9"); - strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42"); - app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); - /**/ - strcpy(app->devices[app->devices_len].eddieName, "eddie00002"); - strcpy(app->devices[app->devices_len].hostname, "lunkwill-FACEBOOKBABE"); - strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42"); - app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); - /**/ - strcpy(app->devices[app->devices_len].eddieName, "eddie00002"); - strcpy(app->devices[app->devices_len].hostname, "fook"); - strcpy(app->devices[app->devices_len].lastSeen, "2023-12-31T23:59:42"); - app->devices_len += 1; assert(app->devices_len < DEVICES_CAP); - /**/ - #undef DEVICES_CAP +static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, void*cls ){ + REGISTER int err; + FindFullDisks *app = cls; + if( app->exitCode ) return; + if( numCols != 2 ){ + LOGERR("[ERROR] 
Expected 2 column in input CSV but found %d\n", numCols); + app->exitCode = -1; return; + } + if( app->devices_cap <= app->devices_cnt ){ + app->devices_cap += 4096; + void *tmp = realloc(app->devices, app->devices_cap*sizeof*app->devices); + if( tmp == NULL ) assert(!"TODO_c04CAJtRAgDYWQIAm10CAOAeAgA0KgIA"); + app->devices = tmp; + } + #define DEVICE (app->devices + app->devices_cnt) + if( row[0].len >= sizeof DEVICE->eddieName ){ + LOGERR("[ERROR] eddieName too long: len=%d\n", row[0].len); + app->exitCode = -1; return; + } + if( row[1].len >= sizeof DEVICE->hostname ){ + LOGERR("[ERROR] hostname too long: len=%d\n", row[1].len); + app->exitCode = -1; return; + } + memcpy(DEVICE->eddieName, row[0].buf, row[0].len); + DEVICE->eddieName[row[0].len] = '\0'; + memcpy(DEVICE->hostname, row[1].buf, row[1].len); + DEVICE->hostname[row[1].len] = '\0'; + #undef DEVICE + app->devices_cnt += 1; +} + + +static void onCsvParserCloseSnkDone( int retval, void*app_ ){ + FindFullDisks *app = app_; + LOGDBG("[DEBUG] Found %d devices in input.\n", app->devices_cnt); + (*app->env)->enqueBlocking(app->env, beginNextDevice, app); +} + + +static void onCsvParserWriteDone( int retval, void*cls ){ + FindFullDisks *app = cls; + if( retval <= 0 ) assert(!"TODO_bD0CAO1tAgDaNgIACzcCAIsOAgBkXgIA"); + (*app->env)->enqueBlocking(app->env, feedNextChunkFromStdinToCsvParser, app); +} + + +static void feedNextChunkFromStdinToCsvParser( void*cls ){ + REGISTER int err; + FindFullDisks *app = cls; + if( app->exitCode ) return; + #define SRC (stdin) + if( app->inBuf == NULL || app->inBuf_cap < 1024 ){ + app->inBuf_cap = 1<<15; + void *tmp = realloc(app->inBuf, app->inBuf_cap*sizeof*app->inBuf);; + if( tmp == NULL ){ assert(!"TODO_TT8CAGQLAgCoawIA9jgCANA6AgBTaAIA"); } + app->inBuf = tmp; + } + err = fread(app->inBuf, 1, app->inBuf_cap, SRC); + if( err <= 0 ){ + (*app->csvSrc)->closeSnk(app->csvSrc, onCsvParserCloseSnkDone, app); + return; + } + app->inBuf_len = err; + (*app->csvSrc)->write(app->inBuf, app->inBuf_len, app->csvSrc, onCsvParserWriteDone, app); + #undef SRC +} + + +static void initCsvParserForDeviceListOnStdin( void*cls ){ + FindFullDisks *app = cls; + static struct Garbage_CsvIStream_Mentor csvMentor = { + .onCsvRow = onCsvRow, + .onCsvDocEnd = no_op, + }; + static struct Garbage_CsvIStream_Opts csvOpts = { .delimCol = ';' }; + app->csvSrc = (*app->env)->newCsvIStream(app->env, &csvOpts, &csvMentor, app); + feedNextChunkFromStdinToCsvParser(app); } int main( int argc, char**argv ){ - static union{ void*align; char space[SIZEOF_struct_GarbageEnv]; } garbMemory; + void *envMemory[SIZEOF_struct_GarbageEnv/sizeof(void*)]; FindFullDisks app = {0}; assert((void*)0 == NULL); #define app (&app) if( parseArgs(argc, argv, app) ){ app->exitCode = -1; goto endFn; } if( app->flg & FLG_isHelp ){ printHelp(); goto endFn; } - //setupExampleDevices(app); - app->env = GarbageEnv_ctor(&(struct GarbageEnv_Mentor){ - .memBlockToUse = &garbMemory, - .memBlockToUse_sz = sizeof garbMemory, - }); + app->env = GarbageEnv_ctor(envMemory, sizeof envMemory); assert(app->env != NULL); - (*app->env)->enqueBlocking(app->env, beginNextDevice, app); + (*app->env)->enqueBlocking(app->env, initCsvParserForDeviceListOnStdin, app); (*app->env)->runUntilDone(app->env); endFn: return !!app->exitCode; -- cgit v1.1 From e04a4d2137681c33c9ef81a677166b9a49e34687 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 12 Apr 2024 19:43:31 +0200 Subject: FindFullDisks.c make maxParallel configurable via cli. 
---
 src/main/c/paisa-fleet/FindFullDisks.c | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c
index 8ac39f1..ac494f2 100644
--- a/src/main/c/paisa-fleet/FindFullDisks.c
+++ b/src/main/c/paisa-fleet/FindFullDisks.c
@@ -87,6 +87,10 @@ static void printHelp( void ){
         "    --sshUser \n"
         "  \n"
         "    --sshPort \n"
+        "      Default: 22\n"
+        "  \n"
+        "    --maxParallel \n"
+        "      Default 1.\n"
         "  \n");
 }
 
@@ -111,6 +115,12 @@ nextArg:;
         errno = 0;
         app->sshPort = strtol(arg, NULL, 0);
         if( errno ){ LOGERR("EINVAL: --sshPort %s\n", arg); return -1; }
+    }else if( !strcmp(arg, "--maxParallel")){
+        arg = argv[iA++];
+        if( arg == NULL ){ LOGERR("EINVAL: Arg --maxParallel needs value\n"); return -1; }
+        errno = 0;
+        app->maxParallel = strtol(arg, NULL, 0);
+        if( errno ){ LOGERR("EINVAL: --maxParallel %s\n", arg); return -1; }
     }else{
         LOGERR("EINVAL: %s\n", arg);
     }
-- cgit v1.1
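With the CSV-from-stdin commit above plus this flag, an invocation now looks
somewhat like below (a sketch; host names, user and binary path are made up;
the two ';'-separated columns are eddieName and hostname, as parsed above):

    printf 'eddie00042;lunkwill-ABBABEAFABBA\n' \
    | ./FindFullDisks --sshUser alice --sshPort 22 --maxParallel 4
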
From ea915518ebcf4c0150aa25fab203515855bf1350 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Mon, 15 Apr 2024 14:50:33 +0200
Subject: Moved article draft away.

---
 .../vertx-promise-errorhandling.txt                | 199 ---------------------
 1 file changed, 199 deletions(-)
 delete mode 100644 doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt

diff --git a/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt b/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
deleted file mode 100644
index 107563e..0000000
--- a/doc/article-drafts/vertx-promise-errorhandling/vertx-promise-errorhandling.txt
+++ /dev/null
@@ -1,199 +0,0 @@
-
-How to do error handling with java vertx Promises
-
-Example library
-```java
-  public class ExampleLib {
-      public Promise fetchPerson();
-      public Promise fetchPhone(String personId);
-  }
-```
-
-The way to go (HINT still suffers from callback-hell, use method refs):
-
-```java
-  exampleLib.fetchPerson().onComplete( personEv -> {
-      if( personEv.failed() ){
-          log.error("TODO error handling", new UnsupportedOperationException(personEv.cause()));
-          return;
-      }
-      Person person = personEv.result();
-      exampleLib.fetchPhone().onComplete( phoneEv -> {
-          if( phoneEv.failed() ){
-              log.error("TODO error handling", new UnsupportedOperationException(phoneEv.cause()));
-              return;
-          }
-          Phone phone = phoneEv.result();
-          // 'person' and 'phone' are now ready to use.
-          // ....
-      });
-  });
-```
-
-
-Why not use onSuccess().onFailure()?
-
-What I've found often in PRODUCTION code is the following:
-
-```java
-  exampleLib.fetchPerson().onSuccess( person -> exampleLib.fetchPhone(person.id, phone -> {
-      log.info("yayy! :) we got {} and {}", person, phone);
-  })).onFailure( ex -> {
-      log.error("whops :(", ex);
-  });
-```
-
-Then I hear stuff like "Yeah, this is cool. It is so short and concise. This
-framework is great". BUT HERE'S THE CATCH: This code has a major flaw. Do you
-spot it? It is a VERY simple example, so we should spot such a major flaw
-immediately. Just imagine a somewhat more realistic example with tons more
-code around it, and it will become even harder to ever spot what is wrong here.
-
-Couldn't find the flaw yet? To make it more obvious I'll rewrite the code as
-good old synchronous, blocking, stone-age code, removing all that fancy
-framework clutter. This will help us to SEE immediately what the problem is:
-
-```java
-  try{
-      Person person = exampleLib.fetchPerson();
-      try{
-          Phone phone = exampleLib.fetchPhone();
-      }catch( Throwable ex ){
-          return; // HINT: This line should be your eye-catcher.
-      }
-      log.info("yayy! :) we got {} and {}", person, phone);
-  }catch( Throwable ex ){
-      log.error("whops :(", ex);
-  }
-```
-
-Take a look at the inner catch block. Now every serious dev should see something
-terrifying. It catches EVERY kind of Throwable and JUST COMPLETELY IGNORES
-THEM! This is exactly what the previous code does. The problem is that even
-experienced developers have a hard time spotting this. I say this because in
-nearly every pull-request I have to review, I spot this issue dozens of times,
-every few lines of code. This is terrifying when hunting bugs later.
-
-
-Why not just use a global error handler?
-
-Lazy devs may argue with DRY, that we should just register a global handler and
-then just ignore all error handling everywhere else.
-
-I think it is a good idea to register the global error handler. BUT we have to
-consider the following:
-
-- If we're writing a library, we MUST NOT steal the error handler. This is the application's job.
-- We still MUST have error handlers for every individual case.
-
-Why not register it from a lib? Because that error handler is GLOBAL to our
-vertx context, and we would make it hard to use our library, because the
-application cannot rely on this feature anymore, as all those fancy
-dependencies are going to fight each other by overriding that handler with
-their own. Which makes the catch-all error handler pretty useless for the
-application.
-
-So why not rely on the application setting this handler? Good APIs are
-easy to use right and hard to use wrong. Silently putting this responsibility
-to the application as an option is an example of how to provoke wrong usage
-of the library. So if a library does this, it just degrades the quality of the
-library's API. And libraries with bad APIs should be avoided if you would like
-to spare your future self unnecessary headache during maintenance later.
-
-So why do we still need individual handlers everywhere, even with a global
-catch-all handler registered? The only thing such a global handler is good for
-is to make visible that error handling is missing SOMEWHERE. And this is the
-problem: the stack traces likely will NOT contain the location where you
-have to apply a fix. You still have to guess where it could be. Here is example
-code which needs to be fixed, but we do NOT yet know that, because we only see
-the exception in the log and do NOT know where it should be handled:
-
-```java
-  public Promise getOrCreatePhone(Id personId, Phone addIfMissing){
-      var onDone = Promise.promise();
-      exampleLib.fetchPerson(personId).onSuccess( person -> exampleLib.fetchPhone(person.id).onSuccess( phone -> {
-          onDone.complete(phone);
-      })).onFailure(onDone::fail);
-      return onDone.future();
-  }
-```
-
-Whoops, this code accidentally misses that it should assign the new phone to the
-person if there is no phone assigned yet. Unluckily we do NOT know this yet, as
-this code is in production and we just found this error in our logs, maybe in
-combination with some customers complaining that they get HTTP response
-timeouts without any further details. With luck we at some point randomly stumble
-over this error. Nothing indicates an HTTP 500 nor any timeout. It just looks
-unrelated to what we're searching for. Exactly like all the other 100'000 useless
-error logs in there.
-
-```
-  foo.bar.HttpNotFoundException: 404
-    at HttpLib.doGet0()
-    at HttpLib.doGet()
-    at HttpLib.get()
-    at EventLoop.runTask()
-    at EventLoop.nextTask()
-    at Thread.run()
-```
-
-So now how would you like to find out where this stack is coming from? There is
-NOT EVEN ONE stacktrace line pointing to any of our application code at all. So
-the only option left is to GUESS where it could come from. And both the
-message and the stacktrace are pretty useless.
-
-Therefore: Just add this fu**ing error handling. It is not that hard:
-
-```java
-  public Promise getOrCreatePhone(Id personId, Phone addIfMissing){
-      var onDone = Promise.promise();
-      exampleLib.fetchPerson(personId, personEv -> {
-          if( personEv.failed() ){
-              onDone.fail(new UnsupportedOperationException("TODO error handling", personEv.cause()));
-              return;
-          }
-          Person person = personEv.result();
-          exampleLib.fetchPhone(person.id, phoneEv -> {
-              if( phoneEv.failed() ){
-                  onDone.fail(new UnsupportedOperationException("TODO error handling", phoneEv.cause()));
-                  return;
-              }
-              onDone.complete(phoneEv.result());
-          });
-      });
-      return onDone.future();
-  }
-```
-
-Now the stack will look somewhat like below, where the very first line points
-you straight to where the problem has to be fixed.
-
-```
-  UnsupportedOperationException: TODO error handling
-    at getOrCreatePhone$lambda$1() line 42
-    at EventLoop.runTask()
-    at EventLoop.nextTask()
-    at Thread.run()
-  Caused by: foo.bar.HttpNotFoundException: 404
-    at HttpLib.doGet0()
-    at HttpLib.doGet()
-    at HttpLib.get()
-    at EventLoop.runTask()
-    at EventLoop.nextTask()
-    at Thread.run()
-```
-
-For those who now start yelling "urgh, this code is soo long": here's your choice:
-
-- Write short code which does NOT TELL YOU WHAT IS HAPPENING and gives bugs
-  many places to play hide-and-seek with you. Aka happy maintenance nightmare.
-- Write some more code which makes it hard for bugs to hide.
- - -Initialized: 2024-04-12 -LastModified: 2024-04-12 -Published: -Author and Copyright: Andreas Fankhauser - - -- cgit v1.1 From f072b8030e09cc87719207d6fbbd678708ea646a Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 16 Apr 2024 15:25:43 +0200 Subject: Fix preflux patch --- src/main/patch/preflux/default.patch | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch index 5b5f3f3..897d731 100644 --- a/src/main/patch/preflux/default.patch +++ b/src/main/patch/preflux/default.patch @@ -2,7 +2,6 @@ TODO describe (like in houston) -diff --git a/pom.xml b/pom.xml --- a/pom.xml +++ b/pom.xml @@ -57,6 +57,24 @@ @@ -30,8 +29,8 @@ diff --git a/pom.xml b/pom.xml -diff --git a/preflux-web/pom.xml b/preflux-web/pom.xml -index 752be702..8f91c053 100644 + + --- a/preflux-web/pom.xml +++ b/preflux-web/pom.xml @@ -14,6 +14,26 @@ @@ -61,8 +60,8 @@ index 752be702..8f91c053 100644 ch.post.it.paisa.alice -diff --git a/preflux-test/pom.xml b/preflux-test/pom.xml -index c50afbe5..115556c4 100644 + + --- a/preflux-test/pom.xml +++ b/preflux-test/pom.xml @@ -16,6 +16,26 @@ @@ -92,14 +91,14 @@ index c50afbe5..115556c4 100644 ch.post.it.paisa.alice -diff --git a/preflux-web/package.json b/preflux-web/package.json -index eda8e051..5353e179 100644 + + --- a/preflux-web/package.json +++ b/preflux-web/package.json @@ -10,10 +10,10 @@ - "check": "npm run format:check && npm run lint && npm run test:no-watch", + "check": "npm run format:check && npm run lint && npm run test", "check:ci": "npm run format:check && npm run lint", - "check:fix": "npm run format:fix && npm run lint:fix && npm run test:no-watch", + "check:fix": "npm run format:fix && npm run lint:fix && npm run test", - "format:check": "prettier --check \"src/main/angular/**/*.{ts,html,css,json}\"", - "format:fix": "prettier --write \"src/main/angular/**/*.{ts,html,css,json}\"", - "lint": "ng lint", @@ -108,12 +107,11 @@ index eda8e051..5353e179 100644 + "format:fix": "true", + "lint": "true", + "lint:fix": "true", - "test": "ng test --watch --browsers=ChromeHeadlessNoSandbox", - "test:ci": "npm run test:no-watch", - "test:no-watch": "ng test --no-watch --browsers=ChromeHeadlessNoSandbox" -diff --git a/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java -new file mode 100644 -index 00000000..e437dc34 + "test": "ng test --no-watch --browsers=ChromeHeadlessNoSandbox", + "test:ci": "npm run test", + "test:watch": "ng test --watch --browsers=ChromeHeadlessNoSandbox" + + --- /dev/null +++ b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java @@ -0,0 +1,115 @@ @@ -232,3 +230,7 @@ index 00000000..e437dc34 + @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); } + +} + + + + -- cgit v1.1 From e62ab6725ace4b9893e28001f144ce7f65fef2b3 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 16 Apr 2024 18:45:09 +0200 Subject: FindFullDisks.c continued. 
--- doc/note/openshift/dbg-mem-issues.txt | 2 +- src/main/c/paisa-fleet/FindFullDisks.c | 112 ++++++++++++++++++++++++--------- 2 files changed, 82 insertions(+), 32 deletions(-) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index 59afa2f..ef69f7a 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -113,7 +113,7 @@ true `# Get made heap dump` \ true `# Probe` \ && stage=prod \ - && logFile="readyness-houston-${stage:?}-$(date -u +%Y%m%d-%H%M%SZ).log" \ + && logFile="houston-ready-${stage:?}-$(date -u +%Y%m%d-%H%M%SZ).log" \ && printf 'c; when ;rspCode; connectSec; trsfSec; totlSec; curlExit\n' | tee -a "${logFile:?}" \ && while true; do true \ && printf 'r;%s;%7d;%11.3f;%8.3f;%8.3f;%9d\n' \ diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c index ac494f2..429b71c 100644 --- a/src/main/c/paisa-fleet/FindFullDisks.c +++ b/src/main/c/paisa-fleet/FindFullDisks.c @@ -33,18 +33,23 @@ true `# make FindFullDisks` \ #if !NDEBUG # define REGISTER register # define LOGDBG(...) fprintf(stderr, __VA_ARGS__) +# define IF_DBG(expr) expr #else # define REGISTER # define LOGDBG(...) +# define IF_DBG(expr) #endif #define LOGERR(...) fprintf(stderr, __VA_ARGS__) + typedef struct FindFullDisks FindFullDisks; typedef struct Device Device; +#define MAGIC_FindFullDisks 0xB5410200 struct FindFullDisks { + IF_DBG(int mAGIC); int flg; const char *sshUser; int sshPort; @@ -61,9 +66,14 @@ struct FindFullDisks { }; +#define MAGIC_Device 0xAB420200 struct Device { - char hostname[sizeof"lunkwill-0123456789AB"]; - char eddieName[sizeof"eddie12345"]; + IF_DBG(int mAGIC); + struct FindFullDisks *app; + char hostname[sizeof"lunkwill-0123456789AB_____"]; + char eddieName[sizeof"eddie12345_____"]; + char stdoutBuf[8192]; + int stdoutBuf_cap, stdoutBuf_len; }; @@ -77,9 +87,9 @@ static void printHelp( void ){ printf("%s%s%s", " \n" " ", strrchr(__FILE__,'/')+1, "\n" " \n" - " Expected format on stdin is:\n" + " Expected format on stdin is a CSV like:\n" " \n" - " eddie00042 lunkwill-ABBABEAFABBA \n" + " eddie00042 lunkwill-ABBABEAFABBA \n" " ...\n" " \n" " Options:\n" @@ -134,10 +144,32 @@ validateArgs:; static void no_op( void*_ ){} +static void examineDeviceResult( void*device_ ){ + REGISTER int err; + Device*const device = device_; assert(device->mAGIC = MAGIC_Device); + //FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks); + FILE *outFd = NULL; + if( device->stdoutBuf_len <= 0 ){ /*nothing to do*/ goto endFn; } + char outName[sizeof"result/eddie12345-lunkwill-1234567890123456.log"]; + err = snprintf(outName, sizeof outName, "result/%s-%s.log", device->eddieName, device->hostname); + assert(err < sizeof outName); + outFd = fopen(outName, "wb"); + if( outFd == NULL ){ LOGDBG("assert(fopen(%s) != %d) %s:%d\n", outName, errno, __FILE__, __LINE__); abort(); } + err = fwrite(device->stdoutBuf, 1, device->stdoutBuf_len, outFd); + assert(err == device->stdoutBuf_len); +endFn: + if( outFd != NULL ) fclose(outFd); +} + + static void Child_onStdout( const char*buf, int buf_len, void*cls ){ - //FindFullDisks*const app = cls; + Device*const device = cls; assert(device->mAGIC = MAGIC_Device); + FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks); if( buf_len > 0 ){ /*another chunk*/ - printf("%.*s", buf_len, buf); + if( device->stdoutBuf_len + buf_len >= device->stdoutBuf_cap ) assert(!"TODO_VD8CAIVAgBDwIA4mECAKVjAgB1XwIAfk"); + 
memcpy(device->stdoutBuf + device->stdoutBuf_len, buf, buf_len); + device->stdoutBuf_len += buf_len; + //printf("%.*s", buf_len, buf); }else{ /*EOF*/ assert(buf_len == 0); } @@ -145,21 +177,34 @@ static void Child_onStdout( const char*buf, int buf_len, void*cls ){ static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){ - FindFullDisks*const app = cls; - LOGDBG("[TRACE] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum); + Device*const device = cls; assert(device->mAGIC == MAGIC_Device); + FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks); + if( retval != 0 || exitCode != 0 || sigNum != 0 ){ + LOGDBG("[DEBUG] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum); + } assert(app->numInProgress > 0); app->numInProgress -= 1; - //LOGDBG("[DEBUG] numInProgress decremented is now %d\n", app->numInProgress); + (*app->env)->enqueBlocking(app->env, examineDeviceResult, device); (*app->env)->enqueBlocking(app->env, beginNextDevice, app); } -static void visitDevice( FindFullDisks*app, const Device*device ){ - assert(device != NULL); +static void visitDevice( FindFullDisks*app, Device*device ){ + assert(device != NULL && device->mAGIC == MAGIC_Device); assert(device < app->devices + app->devices_cnt); LOGERR("\n[INFO ] %s \"%s\" (behind \"%s\")\n", __func__, device->hostname, device->eddieName); int err; char eddieCmd[2048]; + //err = snprintf(eddieCmd, sizeof eddieCmd, "true" + // " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')" + // " && STAGE=$PAISA_ENV" + // " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\"" + // " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true" + // " && echo wrong host. Want %s found $HOSTNAME && false" + // " ;fi" + // " && df", + // device->eddieName, device->eddieName + //); err = snprintf(eddieCmd, sizeof eddieCmd, "true" " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')" " && STAGE=$PAISA_ENV" @@ -181,9 +226,10 @@ static void visitDevice( FindFullDisks*app, const Device*device ){ strncmp("fook-",device->hostname,5) ? device->hostname : "fook" ); assert(err < sizeof eddieCmd); - assert(app->sshPort > 0 && app->sshPort < 0xFFFF); - char sshPortStr[12]; - sprintf(sshPortStr, "%d", app->sshPort); + assert(app->sshPort > 0 && app->sshPort <= 0xFFFF); + char sshPortStr[sizeof"65535"]; + err = snprintf(sshPortStr, sizeof sshPortStr, "%d", app->sshPort); + assert(err < (int)sizeof sshPortStr); char userAtEddie[64]; err = snprintf(userAtEddie, sizeof userAtEddie, "%s@%s", app->sshUser, device->eddieName); assert(err < sizeof userAtEddie); @@ -201,11 +247,11 @@ static void visitDevice( FindFullDisks*app, const Device*device ){ //for( int i = 0 ; childArgv[i] != NULL ; ++i ) LOGDBG(" \"%s\"", childArgv[i]); //LOGDBG("\n\n"); app->child = (*app->env)->newProcess(app->env, &(struct Garbage_Process_Mentor){ - .cls = app, + .cls = device, .usePathSearch = !0, .argv = childArgv, .onStdout = Child_onStdout, - //.onStderr = Child_onStderr, + //.onStderr = , .onJoined = Child_onJoined, }); assert(app->child != NULL); @@ -214,15 +260,15 @@ static void visitDevice( FindFullDisks*app, const Device*device ){ static void beginNextDevice( void*cls ){ - FindFullDisks *app = cls; + FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks); maybeBeginAnotherOne: if( app->numInProgress >= app->maxParallel ){ - LOGDBG("[DEBUG] Already %d/%d in progress. Do NOT trigger more for now.\n", - app->numInProgress, app->maxParallel); + //LOGDBG("[DEBUG] Already %d/%d in progress. 
Do NOT trigger more for now.\n", + // app->numInProgress, app->maxParallel); goto endFn; } if( app->iDevice >= app->devices_cnt ){ - LOGDBG("[INFO ] Work on %d devices triggered. No more devices to trigger.\n", app->iDevice); + //LOGDBG("[INFO ] Work on %d devices triggered. No more devices to trigger.\n", app->iDevice); goto endFn; } assert(app->iDevice >= 0 && app->iDevice < INT_MAX); @@ -237,7 +283,7 @@ endFn:; static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, void*cls ){ REGISTER int err; - FindFullDisks *app = cls; + FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks); if( app->exitCode ) return; if( numCols != 2 ){ LOGERR("[ERROR] Expected 2 column in input CSV but found %d\n", numCols); @@ -250,6 +296,9 @@ static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, app->devices = tmp; } #define DEVICE (app->devices + app->devices_cnt) + IF_DBG(DEVICE->mAGIC = MAGIC_Device); + DEVICE->app = app; + DEVICE->stdoutBuf_cap = sizeof DEVICE->stdoutBuf / sizeof*DEVICE->stdoutBuf; if( row[0].len >= sizeof DEVICE->eddieName ){ LOGERR("[ERROR] eddieName too long: len=%d\n", row[0].len); app->exitCode = -1; return; @@ -268,29 +317,29 @@ static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, static void onCsvParserCloseSnkDone( int retval, void*app_ ){ - FindFullDisks *app = app_; + FindFullDisks*const app = app_; assert(app->mAGIC == MAGIC_FindFullDisks); LOGDBG("[DEBUG] Found %d devices in input.\n", app->devices_cnt); (*app->env)->enqueBlocking(app->env, beginNextDevice, app); } static void onCsvParserWriteDone( int retval, void*cls ){ - FindFullDisks *app = cls; - if( retval <= 0 ) assert(!"TODO_bD0CAO1tAgDaNgIACzcCAIsOAgBkXgIA"); + FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks); + if( retval <= 0 ){ LOGDBG("assert(retval != %d) %s:%d\n", retval, __FILE__, __LINE__); abort(); } (*app->env)->enqueBlocking(app->env, feedNextChunkFromStdinToCsvParser, app); } static void feedNextChunkFromStdinToCsvParser( void*cls ){ REGISTER int err; - FindFullDisks *app = cls; + FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks); if( app->exitCode ) return; #define SRC (stdin) - if( app->inBuf == NULL || app->inBuf_cap < 1024 ){ + if( app->inBuf == NULL || app->inBuf_cap < 1<<15 ){ app->inBuf_cap = 1<<15; - void *tmp = realloc(app->inBuf, app->inBuf_cap*sizeof*app->inBuf);; - if( tmp == NULL ){ assert(!"TODO_TT8CAGQLAgCoawIA9jgCANA6AgBTaAIA"); } - app->inBuf = tmp; + if( app->inBuf ) free(app->inBuf); + app->inBuf = malloc(app->inBuf_cap*sizeof*app->inBuf);; + if( app->inBuf == NULL ){ assert(!"TODO_TT8CAGQLAgCoawIA9jgCANA6AgBTaAIA"); } } err = fread(app->inBuf, 1, app->inBuf_cap, SRC); if( err <= 0 ){ @@ -304,12 +353,12 @@ static void feedNextChunkFromStdinToCsvParser( void*cls ){ static void initCsvParserForDeviceListOnStdin( void*cls ){ - FindFullDisks *app = cls; + FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks); static struct Garbage_CsvIStream_Mentor csvMentor = { .onCsvRow = onCsvRow, .onCsvDocEnd = no_op, }; - static struct Garbage_CsvIStream_Opts csvOpts = { .delimCol = ';' }; + struct Garbage_CsvIStream_Opts csvOpts = { .delimCol = ';' }; app->csvSrc = (*app->env)->newCsvIStream(app->env, &csvOpts, &csvMentor, app); feedNextChunkFromStdinToCsvParser(app); } @@ -319,6 +368,7 @@ int main( int argc, char**argv ){ void *envMemory[SIZEOF_struct_GarbageEnv/sizeof(void*)]; FindFullDisks app = {0}; assert((void*)0 == NULL); #define app 
(&app)
+    IF_DBG(app->mAGIC = MAGIC_FindFullDisks);
     if( parseArgs(argc, argv, app) ){ app->exitCode = -1; goto endFn; }
     if( app->flg & FLG_isHelp ){ printHelp(); goto endFn; }
     app->env = GarbageEnv_ctor(envMemory, sizeof envMemory);
-- cgit v1.1
From 842dd8260f8471e30f3b15e5a29a65fbcedcee15 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Thu, 18 Apr 2024 14:25:35 +0200
Subject: Fix qemu accel for stupid systems.

---
 doc/note/qemu/qemu.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt
index 13dc325..f3fc996 100644
--- a/doc/note/qemu/qemu.txt
+++ b/doc/note/qemu/qemu.txt
@@ -73,6 +73,7 @@ qemu-system-x86_64 \
 ## Broken systems likely need some of those too
     `# Fix broken hosts` \
     -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \
+    -accel whpx,kernel-irqchip=off `# "https://github.com/Tech-FZ/EmuGUI/issues/72#issuecomment-1940933918"` \
     `# Fix broken guests` \
     -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \
-- cgit v1.1
From 958277606df43a8efeca67d902adab26e8ad25e7 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Thu, 18 Apr 2024 15:15:52 +0200
Subject: (qemu) Add hints for proxy config.

---
 doc/note/qemu/qemu.txt | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt
index f3fc996..ee16140 100644
--- a/doc/note/qemu/qemu.txt
+++ b/doc/note/qemu/qemu.txt
@@ -239,6 +239,9 @@ NOTE: Couldn't yet test any of those commands.
 ## Alpine PostInstall
 
     true \
+        && `# HINT: environ setup does not work automatically during login. It has to be sourced manually.` \
+        && P="http://10.0.2.2:3128/" \
+        && printf 'export no_proxy=127.0.0.1,10.0.2.*\nexport http_proxy=%s\nexport https_proxy=%s\n' "${P:?}" "${P:?}" >> '/etc/environment' \
         && apk add openssh-server \
         && rc-update add sshd \
         && sed -i -E 's;^# *(PermitRootLogin).+$;\1 yes;' /etc/ssh/sshd_config \
@@ -255,9 +258,9 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever.
 
     true \
-        && http_proxy= \
-        && https_proxy= \
         && no_proxy=127.0.0.1,10.0.2.* \
+        && http_proxy=http://10.0.2.2:3128 \
+        && https_proxy=http://10.0.2.2:3128 \
         && SUDO= \
         && true \
         && if [ -n "$http_proxy" ]; then true \
-- cgit v1.1
From 7c9d034ba5c1ca43d8544f98d40691fde4d139da Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Thu, 18 Apr 2024 19:11:11 +0200
Subject: qemu serial stdio (untested).
--- doc/note/openshift/dbg-mem-issues.txt | 2 +- doc/note/qemu/qemu.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt index ef69f7a..c730629 100644 --- a/doc/note/openshift/dbg-mem-issues.txt +++ b/doc/note/openshift/dbg-mem-issues.txt @@ -113,7 +113,7 @@ true `# Get made heap dump` \ true `# Probe` \ && stage=prod \ - && logFile="houston-ready-${stage:?}-$(date -u +%Y%m%d-%H%M%SZ).log" \ + && logFile="houston-${stage:?}-ready-$(date -u +%Y%m%d-%H%M%SZ).log" \ && printf 'c; when ;rspCode; connectSec; trsfSec; totlSec; curlExit\n' | tee -a "${logFile:?}" \ && while true; do true \ && printf 'r;%s;%7d;%11.3f;%8.3f;%8.3f;%9d\n' \ diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index ee16140..d4bc3aa 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -63,7 +63,7 @@ qemu-system-x86_64 \ `# USB pass-through` \ -usb -device usb-host,id=myUsbQemuId,vendorid=0xFFFF,productid=0xFFFF \ `# Choose ONE of those for graphic output` \ - -nographic \ + -nographic -serial stdio \ -device VGA \ -display sdl,grab-mod=rctrl \ -display gtk,show-menubar=on \ -- cgit v1.1 From cc6ff38c7108a1ea2f69aee5cf87b0517715ecd7 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 19 Apr 2024 18:01:52 +0200 Subject: Add no-fetch in changelog-gen. Scratch some automation for non-slim in platform. --- src/main/lua/git/GitflowChangelogGen.lua | 17 +++ src/main/nodejs/paisa-nonslim/README.txt | 3 + src/main/nodejs/paisa-nonslim/foo.js | 252 +++++++++++++++++++++++++++++++ 3 files changed, 272 insertions(+) create mode 100644 src/main/nodejs/paisa-nonslim/README.txt create mode 100644 src/main/nodejs/paisa-nonslim/foo.js diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua index 519d12b..3b44ac3 100644 --- a/src/main/lua/git/GitflowChangelogGen.lua +++ b/src/main/lua/git/GitflowChangelogGen.lua @@ -16,6 +16,9 @@ function printHelp() .." --remote \n" .." Name of the git remote to use. Defaults to 'upstream'.\n" .." \n" + .." --no-fetch\n" + .." Do NOT update refs from remote. Just use what we have local.\n" + .." \n" ) end @@ -34,6 +37,8 @@ function parseArgs( app ) iA = iA + 1; arg = _ENV.arg[iA] if not arg then log:write("EINVAL: --remote needs value\n")return end app.remoteName = arg + elseif arg == "--no-fetch" then + app.isFetch = false elseif arg == "--help" then app.isHelp = true; return 0 else @@ -107,6 +112,17 @@ end function run( app ) local snk = io.stdout + if app.isFetch then + -- Make sure refs are up-to-date + local gitFetch = "git fetch \"".. app.remoteName .."\"" + log:write("[DEBUG] ".. gitFetch .."\n") + local gitFetch = io.popen(gitFetch) + while true do + local buf = gitFetch:read(1<<16) + if not buf then break end + log:write(buf) + end + end -- Collect input local git = "git log --date-order --first-parent --decorate --since \"".. app.since.."\"" .." \"".. app.remoteName .."/master\"" @@ -163,6 +179,7 @@ function main() local app = { since = false, remoteName = false, + isFetch = true, fullHistory = {}, fullHistoryRdBeg = 1, commits = {}, diff --git a/src/main/nodejs/paisa-nonslim/README.txt b/src/main/nodejs/paisa-nonslim/README.txt new file mode 100644 index 0000000..e3a94f7 --- /dev/null +++ b/src/main/nodejs/paisa-nonslim/README.txt @@ -0,0 +1,3 @@ + +Created 20240419 as it seems we need some automation for those tasks. 
+ diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js new file mode 100644 index 0000000..ac424c6 --- /dev/null +++ b/src/main/nodejs/paisa-nonslim/foo.js @@ -0,0 +1,252 @@ +;(function(){ "use-strict"; + + const child_process = require("child_process"); + const promisify = require("util").promisify; + const zlib = require("zlib"); + const noop = function(){}; + const log = process.stderr; + + setImmediate(main); + + + function parseArgs( argv, app ){ + log.write("[WARN ] TODO impl parseArgs()\n"); + return 0; + } + + + function workdirOfSync( app, thingyName ){ + if( typeof thingyName !== "string" || !/^[a-z-]+$/.test(thingyName) ) throw TypeError(thingyName); + return "C:/work/projects/isa-svc/"+ thingyName; + } + + + function isWorktreeClean( app, thingyName, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + var child = child_process.spawn( + "sh", [ "-c", "git status --porcelain | grep ." ], + { cwd: workdirOfSync(app, thingyName), windowsHide: true, } + ); + child.on("error", console.error.bind(console)); + child.stdout.on("data", noop); + child.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + child.on("close", function( code, signal ){ + if( signal !== null ){ + throw Error("code "+ code +", signal "+ signal +""); + }else{ + onDone(null, code !== 0); + } + }); + } + + + function getDropSlimArtifactsTagInPlatformPatch( app, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + /* patch which empties the tag in + * "poms/service/paisa-service-superpom/pom.xml" as described in + * SDCISA-15648 */ + var patch = "" + +"tVnNcuMoEL7nKbTXzUKc2WRmQk1tZav2Ppd9AYRaEgoCCpBj79NvI0u2E9uJxUg+CJDpr4Fu+k+F" + +"LMuMkEqGjN9Z0/o7D24tBdxZLj0nw4j4zoLD/+McumlVlk+ZfSN1AZssL+9L8fj9cUXpt+JhxfFR" + +"Zver1deHhxtCyLQV3Nze3k5cxfNzRp5W9398f8puY/ste36+yT77/birDFf+rytmCqNLWXWOB2n0" + +"FQQ9kXUG1xkkIAtyFUmk8kq2fyNVyUWYQhh/3FiulORaADvq//iNkH9//vOTZRrW4LJCOhBBbbPO" + +"Q5FJnVnFQ2lcS8hEhqKm1vhAZaC9iChXMvJWvZgGce0ODxsHC6AXpuVSL4WOMhTg/VLwylSV1Iud" + +"zSvkCdDGVZR7iyrSsKF9BY6K0+uR49rLINcw6k++zbx1cRe7yX6yFp1h6MKCSotWo+Q+gEPbwYTi" + +"3rc8wMXd1TIHp3EGWePxFjwYl8RTG1DSeOrABwWBnXlHYRNolB72L66naaWWm6WWMDvbg6Iit9Zo" + +"GjWS7fq9dpIWAsdj5ZP3gwie5MB1F6Ty7OTNAkfYMxCmAMHejFKBlEKtRqfi2Zl3aaClVNBZZXjB" + +"Tl+lQUrDDt00CMV1xY4HiTA7i8nejWcFI9zK2QHZ0xNdobcCT7QJBDbSh2lMNNoHX1KoBRc1sKFN" + +"MPAj0H+tcbZmQzv3ZYlWfW866d50sjPm9FeAG77mG4qBlscDhRj5HBg0lpMv9H66PHuPZOPx0joE" + +"i+K0RoMOnvVDJbE/K2Kak76I18p2ajxx7BTxUMWLH031MCBco+LyNMP0GXZCAPQ5aPQpOWYocwBH" + +"rLC1b8HjC9J49+f96nJsVEOhjHhJuj9VDE5K6WuKoRzbqTqoaTgj1Z58+nXYUR7kz96/SLKY1Hea" + +"YpYj1yPoC3eBH71KWWUUWhQ6wm3y9HW9hUlTz1Mc2dqJ0ota0OTGezr6ln40epZFg9bI26vyoWGN" + +"UMRgFE764XKheb/b3t2wXTNRA0zBSUDbx/a9JM/YxAuuJN64obNELF4ZUymgMXqkJWpI3lWeRWOy" + +"epwGJg2tZKi7vPeFBIWOa0QkPzEoPRJ3p0gw54U9i4wHzzUocWyRa/9MzVcvAyY5/zFBOur3uRkK" + +"3wgZtksES2d4XmRzJnFL4GmQl1kw4RsqExj8DuleeoD9AWjMl9E0zYPpt+j5W+LRr3V+3nVimuuk" + +"mAkTheSIgwqDXpfqgs/hrqGWQqFi6Rdizet851pzBwUap7Ws5lzvESDezDmRY1kRE4pYQCUmj0qW" + +"iH6wq/0zEaAvsZWOt/Bq3AsbS27GflaVw8ixXYs053+B6a7ONxterNdcrtUctjHnHoRB4W4SkriP" + +"8eK3CWvcnLgp7vACGGwsOgs/ObL+ADLA1DLGB2Ao4zmxUO1TMoghYI4pyaH7SzWEMWpgn4YP0OZQ" + +"FGgom3YT4oTEEHpHTEc4doKbvJehtLUvcVlj1JcF9xND8igX4bo293RMp9k+r97Je2bQtKJa3inV" + +"47Yo52LLDt3Ee9w4g8kj2zXTU4xQm66qQ7wWnm4wUgDesqGd8Eny7t03ydurKd8SngBdyf74Q+qV" + +"fE3nBFw5O/4wMUer3RsAqjGhigVz+vvN/wsTxCQ=" + ; + patch = Buffer.from(patch, 'base64'); + patch = zlib.inflateRaw(patch, function( ex, patch ){ + if( ex ){ throw ex; } + 
setImmediate(onDone, null, patch); + }); + } + + + function getJettyServiceNamesAsArray( app, onDone ){ + setImmediate(onDone, null, [ /*TODO get via args/file */ + TODO_1zwCAF4NAgAfcAIA628CAJE4AgDnRgIA + ]); + } + + + function dropSlimFromAllJenkinsfiles( app, onDone ){ + var iSvc = -1; + var jettyServices; + var jettyService; + getJettyServiceNamesAsArray(app, function( ex, jettyServices_ ){ + if( ex ){ throw ex; } + jettyServices = jettyServices_; + nextJettyService(); + }); + function nextJettyService(){ + if( ++iSvc >= jettyServices.length ){ onNoMoreJettyServices(); return; } + jettyService = jettyServices[iSvc]; + isWorktreeClean(app, jettyService, onIsWorktreeCleanRsp); + } + function onIsWorktreeCleanRsp( ex, isClean ){ + if( ex ) throw ex; + if( !isClean ){ + log.write("[WARN ] Worktree not clean. Will skip: "+ jettyService +"\n"); + nextJettyService(); + return; + } + log.write("[DEBUG] Patching \""+ jettyService +"/Jenkinsfile\"\n"); + var child = child_process.spawn( + "sed", [ "-i", "-E", "s_^(.*?buildMaven.*?),? *slim: *true,? *(.*?)$_\\1\\2_", "Jenkinsfile" ], + { cwd: workdirOfSync(app, jettyService) }, + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + child.on("close", function(){ + nextJettyService(); + }); + } + function onNoMoreJettyServices( app ){ + onDone(null, null); + } + } + + + function checkoutUpstreamDevelop( app, thingyName, onDone){ + var child; + child = child_process.spawn( + "sh", ["-c", "git checkout upstream/develop || git checkout origin/develop"], + { cwd: workdirOfSync(app, thingyName), }); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf); }); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + onDone(null, null); + } + }); + } + + + function checkoutUpstreamDevelopForAllJettyServices( app, onDone){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + var iSvc = -1, jettyServices, jettyService; + getJettyServiceNamesAsArray(app, function( ex, ret ){ + if( ex ) throw ex; + jettyServices = ret; + nextJettyService(); + }); + function nextJettyService( ex ){ + if( ex ) throw ex; + if( ++iSvc >= jettyServices.length ){ onDone(null, null); return; } + jettyService = jettyServices[iSvc]; + log.write("[DEBUG] git checkout "+ jettyService +"\n"); + checkoutUpstreamDevelop(app, jettyService, nextJettyService); + } + } + + + function fetchChangesFromGitit( app, thingyName, onDone ){ + var child; + child = child_process.spawn( + "sh", ["-c", "git fetch upstream || git fetch origin"], + { cwd: workdirOfSync(app, thingyName), }); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf); }); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + onDone(null, null); + } + }); + } + + + function fetchChangesFromGititForAllJettyServices( app, onDone ){ + var iSvc = -1, jettyServices, jettyService; + getJettyServiceNamesAsArray(app, function( ex, ret ){ + if( ex ) throw ex; + jettyServices = ret; + nextJettyService(); + }); + function nextJettyService( ex ){ + if( ex ) throw ex; + if( ++iSvc >= jettyServices.length ){ onDone(null, null); return; } + jettyService = jettyServices[iSvc]; + log.write("[DEBUG] git fetch "+ jettyService +"\n"); + fetchChangesFromGitit(app, jettyService, 
nextJettyService); + } + } + + + function patchAwaySlimPackagingInPlatform( app, onDone ){ + isWorktreeClean(app, "platform", function( ex, isClean ){ + if( ex ){ throw ex; } + if( !isClean ){ onDone(Error("Platform worktree not clean")); return; } + getDropSlimArtifactsTagInPlatformPatch(app, onPatchBufReady); + }); + function onPatchBufReady( ex, patch ){ + if( ex ){ throw ex; } + var gitApply = child_process.spawn( + "sh", ["-c", "git apply"], + { cwd: workdirOfSync(app, "platform"), }); + gitApply.on("error", console.error.bind(console)); + gitApply.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + gitApply.stdout.on("data", noop); + gitApply.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ throw Error(""+ code +", "+ signal +""); } + onDone(null, null); + }); + gitApply.stdin.write(patch); + gitApply.stdin.end(); + } + } + + + function run( app ){ + patchAwaySlimPackagingInPlatform(app, onPatchAwaySlimPackagingInPlatformDone); + function onPatchAwaySlimPackagingInPlatformDone( ex, ret ){ + if( ex ){ log.write("[WARN ] "+ ex.message +"\n"); /*throw ex;*/ } + fetchChangesFromGititForAllJettyServices(app, + onFetchChangesFromGititForAllJettyServicesDone); + } + function onFetchChangesFromGititForAllJettyServicesDone( ex ){ + if( ex ){ throw ex; } + checkoutUpstreamDevelopForAllJettyServices(app, + onCheckoutUpstreamDevelopForAllJettyServicesDone); + } + function onCheckoutUpstreamDevelopForAllJettyServicesDone( ex ){ + if( ex ) throw ex; + dropSlimFromAllJenkinsfiles(app, onDropSlimFromAllJenkinsfilesDone); + } + function onDropSlimFromAllJenkinsfilesDone( ex ){ + if( ex ){ throw ex; } + log.write("[INFO ] App done\n"); + } + } + + + function main(){ + const app = Object.seal({ + isHelp: false, + maxParallel: 1, + }); + if( parseArgs(process.argv, app) !== 0 ){ os.exit(1); } + if( app.isHelp ){ printHelp(); return; } + run(app); + } + + +}()); -- cgit v1.1 From c7c4b8308b70c6f3fb5b464bf3038d7290d80e92 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sat, 20 Apr 2024 01:56:55 +0200 Subject: Add ffmpeg filter to fix some loudness war. --- doc/note/ffmpeg/ffmpeg.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/doc/note/ffmpeg/ffmpeg.txt b/doc/note/ffmpeg/ffmpeg.txt index 2b18a44..c992537 100644 --- a/doc/note/ffmpeg/ffmpeg.txt +++ b/doc/note/ffmpeg/ffmpeg.txt @@ -32,6 +32,18 @@ Use -codec:v copy to keep video, or -codec:v no for audio-only. -filter:a lowpass=f=16000 +## Fix Loudness war, bring bass back + + -af "equalizer=f=200:w=200:t=h:g=-9,equalizer=f=400:w=600:t=h:g=-9,equalizer=f=2000:w=2000:t=h:g=-12,equalizer=f=4000:w=4000:t=h:g=-12,equalizer=f=10000:w=10000:t=h:g=-12,volume=+0.0dB" + + +## Audio trim + +HINT: Repeat afade twice for log. 
+ + -af "afade=t=in:d=500ms,afade=t=out:st=183427ms:d=500ms" \ + + ## Record Desktop ffmpeg -f gdigrab -framerate 6 -probesize 10M -offset_x 0 -offset_y 0 \ -- cgit v1.1 From e17b8876cfafae081784d5cf1258ce16c50e283a Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 22 Apr 2024 23:43:26 +0200 Subject: qemu fix --- doc/note/qemu/qemu.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index d4bc3aa..5ee13b2 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -46,7 +46,7 @@ Windoof: qemu-system-x86_64 \ -nodefaults `# <- TODO Fix network when using this` \ -accel kvm:whpx:hax:tcg -m size=2G -smp cores=$(nproc) \ - -monitor stdio \ + -monitor stdio -serial stdio `# choose ONE` \ `# Drives & Boot.` \ -boot order=dc \ -cdrom "path/to/cd.iso" \ @@ -63,7 +63,7 @@ qemu-system-x86_64 \ `# USB pass-through` \ -usb -device usb-host,id=myUsbQemuId,vendorid=0xFFFF,productid=0xFFFF \ `# Choose ONE of those for graphic output` \ - -nographic -serial stdio \ + -nographic \ -device VGA \ -display sdl,grab-mod=rctrl \ -display gtk,show-menubar=on \ -- cgit v1.1 From 301383f413f5cccca318515f90d27140029b9b46 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 23 Apr 2024 10:37:43 +0200 Subject: Add short help to noslim helper script. --- src/main/nodejs/paisa-nonslim/foo.js | 56 ++++++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js index ac424c6..8a92307 100644 --- a/src/main/nodejs/paisa-nonslim/foo.js +++ b/src/main/nodejs/paisa-nonslim/foo.js @@ -4,14 +4,54 @@ const promisify = require("util").promisify; const zlib = require("zlib"); const noop = function(){}; + const logAsString = function( buf ){ log.write(buf.toString()); }; const log = process.stderr; setImmediate(main); + function printHelp( argv, app ){ + process.stdout.write(" \n" + +" Automate some steps that are tedious manually.\n" + +" \n" + +" Options:\n" + +" \n" + +" --yolo\n" + +" WARN: Use this only if you know what you're doing! It potentially\n" + +" could damage stuff! Only use, if you understand what this script\n" + +" is doing!\n" + +" \n" + ); + } + + function parseArgs( argv, app ){ - log.write("[WARN ] TODO impl parseArgs()\n"); - return 0; + const STEP_fetchArg = 1, STEP_parseArg = 2, STEP_verify = 3; + var arg, iA = 1, step = STEP_fetchArg; + var isYolo = false; + while(1) switch( step ){ + case STEP_fetchArg: { + arg = argv[++iA]; + step = (arg === undefined) ? STEP_verify : STEP_parseArg; + break; } + case STEP_parseArg: { + if(0){ + }else if( arg == "--help" ){ + app.isHelp = true; return 0; + }else if( arg == "--yolo" ){ + isYolo = true; + }else{ + log.write("EINVAL: "+ arg +"\n"); + return -1; + } + step = STEP_fetchArg; break; } + case STEP_verify: { + if( !isYolo ){ log.write("EINVAL: Doing noting with zero args to prevent damage.\n"); return -1; } + return 0; } + default: + throw Error(step); + } + throw Error("unreachable"); } @@ -25,11 +65,11 @@ if( typeof onDone != "function" ) throw TypeError("onDone"); var child = child_process.spawn( "sh", [ "-c", "git status --porcelain | grep ."
], - { cwd: workdirOfSync(app, thingyName), windowsHide: true, } + { cwd: workdirOfSync(app, thingyName), } ); child.on("error", console.error.bind(console)); child.stdout.on("data", noop); - child.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + child.stderr.on("data", logAsString); child.on("close", function( code, signal ){ if( signal !== null ){ throw Error("code "+ code +", signal "+ signal +""); @@ -111,12 +151,12 @@ { cwd: workdirOfSync(app, jettyService) }, ); child.on("error", console.error.bind(console)); - child.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + child.stderr.on("data", logAsString); child.on("close", function(){ nextJettyService(); }); } - function onNoMoreJettyServices( app ){ + function onNoMoreJettyServices(){ onDone(null, null); } } @@ -203,7 +243,7 @@ "sh", ["-c", "git apply"], { cwd: workdirOfSync(app, "platform"), }); gitApply.on("error", console.error.bind(console)); - gitApply.stderr.on("data", function( buf ){ log.write(buf.toString()); }); + gitApply.stderr.on("data", logAsString); gitApply.stdout.on("data", noop); gitApply.on("close", function( code, signal ){ if( code !== 0 || signal !== null ){ throw Error(""+ code +", "+ signal +""); } @@ -243,7 +283,7 @@ isHelp: false, maxParallel: 1, }); - if( parseArgs(process.argv, app) !== 0 ){ os.exit(1); } + if( parseArgs(process.argv, app) !== 0 ){ process.exit(1); } if( app.isHelp ){ printHelp(); return; } run(app); } -- cgit v1.1 From bfb5d8c8c42bad9ffddb502611d43f13ff54a6bf Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 24 Apr 2024 18:56:25 +0200 Subject: Continued on noslim helper. --- src/main/nodejs/paisa-nonslim/foo.js | 629 +++++++++++++++++++++++++++++------ 1 file changed, 522 insertions(+), 107 deletions(-) diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js index 8a92307..2ce8e68 100644 --- a/src/main/nodejs/paisa-nonslim/foo.js +++ b/src/main/nodejs/paisa-nonslim/foo.js @@ -1,11 +1,12 @@ ;(function(){ "use-strict"; const child_process = require("child_process"); + const fs = require("fs"); const promisify = require("util").promisify; const zlib = require("zlib"); const noop = function(){}; - const logAsString = function( buf ){ log.write(buf.toString()); }; const log = process.stderr; + const logAsString = function( buf ){ log.write(buf.toString()); }; setImmediate(main); @@ -16,65 +17,124 @@ +" \n" +" Options:\n" +" \n" - +" --yolo\n" - +" WARN: Use this only if you know what you're doing! It potentially\n" - +" could damage stuff! Only use, if you understand what this script\n" - +" is doing!\n" + +" --default\n" + +" Perform default action (whatever default means).\n" + +" \n" + +" --print-isaVersion\n" + +" Print a preflux isaVersion to stdout filled with the patched\n" + +" services.\n" + +" \n" + +" --reset-hard-to-develop\n" + +" Resets all the services back to develop. WARN if you've uncommitted\n" + +" work in some of those repos, IT WILL BE LOST!\n" +" \n" + +" --push | --push-force\n" + +" Create commits for patched services and push them to upstream. If\n" + +" not given, the change is only made locally (aka without cluttering\n" + +" remote git repo). The force variant will replace existing branches\n" + +" on the remnote. If given multiple times, less-invasive wins.\n" + +" \n" + // not impl yet + //+" --max-parallel \n" + //+" How many tasks to run concurrently. Defaults to 1. 
Which means to\n" + //+" do all the work sequentially (HINT: very handy for debugging).\n" + //+" \n" ); } function parseArgs( argv, app ){ - const STEP_fetchArg = 1, STEP_parseArg = 2, STEP_verify = 3; - var arg, iA = 1, step = STEP_fetchArg; - var isYolo = false; - while(1) switch( step ){ - case STEP_fetchArg: { - arg = argv[++iA]; - step = (arg === undefined) ? STEP_verify : STEP_parseArg; - break; } - case STEP_parseArg: { - if(0){ - }else if( arg == "--help" ){ + var hasArgs = false; + for( var iA = 2 ; iA < argv.length ; ++iA ){ + var arg = argv[iA]; + if( arg == "--help" ){ app.isHelp = true; return 0; - }else if( arg == "--yolo" ){ - isYolo = true; + }else if( arg == "--default" ){ + hasArgs = true; + }else if( arg == "--push" ){ + if( app.isPushForce ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; } + app.isPush = true; + hasArgs = true; + }else if( arg == "--push-force" ){ + if( app.isPush ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; } + app.isPushForce = true; + hasArgs = true; + }else if( arg == "--print-isaVersion" ){ + app.isPrintIsaVersion = true; + hasArgs = true; + }else if( arg == "--reset-hard-to-develop" ){ + app.isResetHardToDevelop = true; + hasArgs = true; + //}else if( arg == "--max-parallel" ){ + // arg = argv[++iA]; + // if( !/^[0-9]+$/.test(arg) ){ log.write("EINVAL: --max-parallel "+ arg +"\n"); return -1; } + // app.maxParallel = 0 + arg; }else{ log.write("EINVAL: "+ arg +"\n"); return -1; } - step = STEP_fetchArg; break; } - case STEP_verify: { - if( !isYolo ){ log.write("EINVAL: Doing noting with zero args to prevent damage.\n"); return -1; } - return 0; } - default: - throw Error(step); - } - throw Error("unreachable"); + } + if( !hasArgs ){ + log.write("EINVAL: Refuse to produce damage with zero args.\n"); + return -1; + } + return 0; + } + + + function isThingyNameValid( app, thingyName ){ + if( typeof thingyName !== "string" ) return false; + if( !/^[a-z-]+$/.test(thingyName) ) return false; + return true; } function workdirOfSync( app, thingyName ){ - if( typeof thingyName !== "string" || !/^[a-z-]+$/.test(thingyName) ) throw TypeError(thingyName); - return "C:/work/projects/isa-svc/"+ thingyName; + if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName); + return app.workdir +"/"+ thingyName; } - function isWorktreeClean( app, thingyName, onDone ){ + function gitUrlOfSync( app, thingyName ){ + if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName); + return "https://example.com/scm/isa/"+ thingyName +".git"; + } + + + function isCloned( app, thingyName, onDone){ if( typeof onDone != "function" ) throw TypeError("onDone"); var child = child_process.spawn( - "sh", [ "-c", "git status --porcelain | grep ." 
], + "git", ["status", "--porcelain"], { cwd: workdirOfSync(app, thingyName), } ); child.on("error", console.error.bind(console)); child.stdout.on("data", noop); child.stderr.on("data", logAsString); child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + onDone(null, true); + } + }); + } + + + function isWorktreeClean( app, thingyName, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + var isStdoutDirty = false; + var child = child_process.spawn( + "git", ["status", "--porcelain"], + { cwd: workdirOfSync(app, thingyName), } + ); + child.on("error", console.error.bind(console)); + child.stdout.on("data", function(){ isStdoutDirty = true; }); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ if( signal !== null ){ throw Error("code "+ code +", signal "+ signal +""); }else{ - onDone(null, code !== 0); + onDone(null, !isStdoutDirty); } }); } @@ -119,29 +179,208 @@ function getJettyServiceNamesAsArray( app, onDone ){ setImmediate(onDone, null, [ /*TODO get via args/file */ - TODO_1zwCAF4NAgAfcAIA628CAJE4AgDnRgIA + "allitnil", "babelfish", "barman", + //"benjy", "bentstick", "blart", "captain", "caveman", + //"colin", "deep", "drdan", "guide", "heimdall", "hooli", "jeltz", "kwaltz", "lazlar", + //"loon", "magician", "megacamel", "minetti", "mown", "neutron", "nowwhat", "pobble", + //"poodoo", "prosser", "rob", "slarti", "streetmentioner", "thor", "towel", "trillian", + //"vannharl", "vogon", "vroom", "zaphake", "zem", ]); } + function pushService( app, thingyName, onDone ){ + if( typeof onDone != "function" ){ throw TypeError("onDone"); } + var iRemoteNameToTry = 0; + push(); + function push( ex, isClean ){ + if( ex ) throw ex; + var remoteName = app.remoteNamesToTry[iRemoteNameToTry++]; + if( remoteName === undefined ){ endFn(Error("No more remote names. s="+ thingyName +"")); return; } + log.write("[DEBUG] "+ thingyName +" - git push "+ remoteName +" " + + app.branchName +(app.isPushForce?" 
--force":"")+"\n"); + argv = ["push", remoteName, "refs/heads/"+app.branchName +":refs/heads/"+ app.branchName]; + if( app.isPushForce ) argv.push("--force"); + var child = child_process.spawn( + "git", argv, + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code === 128 ){ /* retry with next upstream name */ + push(); return; + }else if( code !== 0 || signal !== null ){ + endFn(Error("code="+ code +", signal="+ signal +"")); + return; + } + endFn(); + }); + } + function endFn( ex, ret ){ + onDone(ex, ret); + } + } + + + function commitService( app, thingyName, onDone ){ + if( typeof onDone != "function" ){ throw Error("onDone"); } + incrNumTasks(app); + isWorktreeClean(app, thingyName, gitAdd); + function gitAdd( ex, isClean ){ + if( ex ) throw ex; + if( isClean ){ + log.write("[INFO ] Nothing to commit in \""+ thingyName +"\"\n"); + endFn(null, null); return; + } + log.write("[DEBUG] "+ thingyName +"$ git add Jenkinsfile\n"); + var child = child_process.spawn( + "git", ["add", "Jenkinsfile"], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + endFn(Error("code="+ code +", signal="+ signal +"")); + return; + } + gitCommit(); + }); + } + function gitCommit( ex ){ + if( ex ) throw ex; + log.write("[DEBUG] "+ thingyName +"$ git commit -m \""+ app.commitMsg +"\"\n"); + var child = child_process.spawn( + "git", ["commit", "-m", app.commitMsg], + { cwd:workdirOfSync(app, thingyName) } + ); + var stdoutBufs = []; + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.stdout.on("data", function( buf ){ stdoutBufs.push(buf); }); + child.on("exit", function( code, signal ){ + if( code !== 0 || signal !== null ){ + var stdoutStr = ""; + for( var buf in stdoutBufs ){ stdoutStr += buf.toString(); } + if( stdoutStr.length ){ log.write(stdoutStr); } + endFn(Error("code="+ code +", signal="+ signal +"")); + return; + } + createBranch(); return; + }); + } + function createBranch( ex ){ + if( ex ) throw ex; + log.write("[DEBUG] "+ thingyName +"$ git branch "+ app.branchName +"\n"); + var child = child_process.spawn( + "git", ["branch", "-f", app.branchName], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("exit", function( code, signal ){ + if( code !== 0 || signal !== null ){ + endFn(Error("code="+ code +", signal="+ signal +"")); + return; + } + endFn(); return; + }); + } + function endFn( ex, ret ){ + decrNumTasks(app); + onDone(ex, ret); + } + } + + + function commitAllServices( app, onDone ){ + var iSvc = 0; + var services; + incrNumTasks(app); + getJettyServiceNamesAsArray(app, onGetJettyServiceNamesAsArrayDone); + function onGetJettyServiceNamesAsArrayDone( ex, ret ){ + if( ex ) throw ex; + services = ret; + nextService(null); + } + function nextService( ex ){ + if( ex ) throw ex; + if( iSvc >= services.length ){ endFn(null); return; } + var thingyName = services[iSvc++]; + if( !thingyName ) throw Error("assert(thingyName != NULL)"); + commitService(app, thingyName, nextService); + } + function endFn( ex ){ + decrNumTasks(app); + if( ex ) throw ex; + log.write("[DEBUG] No more services to commit\n"); + onDone(null, null); + } + } + + + function 
setPlatformVersionInService( app, thingyName, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + updateParent(); + function updateParent(){ + log.write("[DEBUG] "+ thingyName +" - Set platform version "+ app.platformSnapVersion +"\n"); + var child = child_process.spawn( + "mvn", ["versions:update-parent", "-DparentVersion="+ app.platformSnapVersion], + { cwd: workdirOfSync(app, jettyService) }, + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + return; + } + updateProperty(); + }); + } + function updateProperty( ex ){ + if( ex ) throw ex; + log.write("[DEBUG] "+ thingyName +" - Set parent.version "+ app.platformSnapVersion +"\n"); + var child = child_process.spawn( + "mvn", ["versions:set-property", "-Dproperty=parent.version", "-DnewVersion="+ app.platformSnapVersion], + { cwd: workdirOfSync(app, jettyService) }, + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + return; + } + onDone(); + }); + } + } + + function dropSlimFromAllJenkinsfiles( app, onDone ){ var iSvc = -1; var jettyServices; var jettyService; + incrNumTasks(app); getJettyServiceNamesAsArray(app, function( ex, jettyServices_ ){ - if( ex ){ throw ex; } + if( ex ) throw ex; jettyServices = jettyServices_; nextJettyService(); }); - function nextJettyService(){ + function nextJettyService( ex ){ + decrNumTasks(app); + if( ex ) throw ex; if( ++iSvc >= jettyServices.length ){ onNoMoreJettyServices(); return; } + incrNumTasks(app); jettyService = jettyServices[iSvc]; isWorktreeClean(app, jettyService, onIsWorktreeCleanRsp); } function onIsWorktreeCleanRsp( ex, isClean ){ if( ex ) throw ex; if( !isClean ){ - log.write("[WARN ] Worktree not clean. 
Will skip: "+ jettyService +"\n"); + log.write("[WARN ] Wont patch: Worktree not clean: "+ jettyService +"\n"); nextJettyService(); return; } @@ -163,126 +402,302 @@ function checkoutUpstreamDevelop( app, thingyName, onDone){ - var child; - child = child_process.spawn( - "sh", ["-c", "git checkout upstream/develop || git checkout origin/develop"], - { cwd: workdirOfSync(app, thingyName), }); - child.on("error", console.error.bind(console)); - child.stderr.on("data", function( buf ){ log.write(buf); }); - child.on("close", function( code, signal ){ - if( code !== 0 || signal !== null ){ - onDone(Error("code "+ code +", signal "+ signal)); - }else{ - onDone(null, null); - } - }); - } - - - function checkoutUpstreamDevelopForAllJettyServices( app, onDone){ - if( typeof onDone != "function" ) throw TypeError("onDone"); - var iSvc = -1, jettyServices, jettyService; - getJettyServiceNamesAsArray(app, function( ex, ret ){ - if( ex ) throw ex; - jettyServices = ret; - nextJettyService(); - }); - function nextJettyService( ex ){ - if( ex ) throw ex; - if( ++iSvc >= jettyServices.length ){ onDone(null, null); return; } - jettyService = jettyServices[iSvc]; - log.write("[DEBUG] git checkout "+ jettyService +"\n"); - checkoutUpstreamDevelop(app, jettyService, nextJettyService); + var iRemoteName = 0; + checkout(); + function checkout(){ + var remoteName = app.remoteNamesToTry[iRemoteName]; + if( remoteName === undefined ){ onDone(Error("No more remote names for "+ thingyName)); return; } + log.write("[DEBUG] git checkout "+ thingyName +" "+ remoteName +"/develop\n"); + var child = child_process.spawn( + "git", ["checkout", remoteName+"/develop"], + { cwd: workdirOfSync(app, thingyName), }); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf); }); + child.on("close", function( code, signal ){ + if( !"TODO_GlACAIQoAgDMTwIAIh8CAOJvAgALLgIA" ){ + checkout(); /* try next remote name */ + }else if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + onDone(null, null); + } + }); } } function fetchChangesFromGitit( app, thingyName, onDone ){ var child; - child = child_process.spawn( - "sh", ["-c", "git fetch upstream || git fetch origin"], - { cwd: workdirOfSync(app, thingyName), }); - child.on("error", console.error.bind(console)); - child.stderr.on("data", function( buf ){ log.write(buf); }); - child.on("close", function( code, signal ){ - if( code !== 0 || signal !== null ){ - onDone(Error("code "+ code +", signal "+ signal)); - }else{ + var iRemoteName = 0; + mkAppWorkdir(); + function mkAppWorkdir( ex ){ + if( ex ) throw ex; + fs.mkdir(app.workdir, {recursive:true}, checkRepoExists); + } + function checkRepoExists( ex ){ + if( ex ) throw ex; + fs.exists(workdirOfSync(app, thingyName) +"/.git", function( isLocalCloneExists ){ + isLocalCloneExists ? 
fetch() : clone(); + }); + } + function clone( ex ){ + if( ex ) throw ex; + log.write("[DEBUG] git clone "+ thingyName +"\n"); + var child = child_process.spawn( + "git", ["clone", "--no-single-branch", "--depth", "4", gitUrlOfSync(app, thingyName)], + { cwd: app.workdir }); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf); }); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); return; + } onDone(null, null); - } - }); + }); + } + function fetch( ex ){ + if( ex ) throw ex; + var remoteName = app.remoteNamesToTry[iRemoteName++]; + if( remoteName === undefined ){ + onDone(Error("No more remotes to try for "+ thingyName)); return; } + log.write("[DEBUG] "+ thingyName +" - git fetch "+ remoteName +"\n"); + var child = child_process.spawn( + "git", ["fetch", remoteName], + { cwd: workdirOfSync(app, thingyName), }); + child.on("error", console.error.bind(console)); + child.stderr.on("data", function( buf ){ log.write(buf); }); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); return; + } + onDone(null, null); + }); + } } - function fetchChangesFromGititForAllJettyServices( app, onDone ){ - var iSvc = -1, jettyServices, jettyService; - getJettyServiceNamesAsArray(app, function( ex, ret ){ - if( ex ) throw ex; - jettyServices = ret; - nextJettyService(); - }); - function nextJettyService( ex ){ - if( ex ) throw ex; - if( ++iSvc >= jettyServices.length ){ onDone(null, null); return; } - jettyService = jettyServices[iSvc]; - log.write("[DEBUG] git fetch "+ jettyService +"\n"); - fetchChangesFromGitit(app, jettyService, nextJettyService); + function setVersionInPlatform( app, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + setVersion(); + function setVersion(){ + log.write("[DEBUG] platform - mvn versions:set "+ app.platformSnapVersion +"\n"); + var child = child_process.spawn( + "mvn", ["versions:set", "-DgenerateBackupPoms=false", "-DnewVersion="+app.platformSnapVersion], + { cwd: workdirOfSync(app, "platform"), } + ); + child.on("error", console.error.bind(console)); + child.stdout.on("data", noop); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + endFn(Error("code "+ code +", signal "+ signal)); + return; + } + endFn(); + }); + } + function endFn( ex, ret ){ + onDone(ex, ret); } } function patchAwaySlimPackagingInPlatform( app, onDone ){ + var onDoneCalledNTimes = 0; + incrNumTasks(app); isWorktreeClean(app, "platform", function( ex, isClean ){ - if( ex ){ throw ex; } - if( !isClean ){ onDone(Error("Platform worktree not clean")); return; } + if( ex ) throw ex; + if( !isClean ){ log.write("[WARN ] Skip platform patch: Worktree not clean\n"); + endFn(); return; } getDropSlimArtifactsTagInPlatformPatch(app, onPatchBufReady); }); function onPatchBufReady( ex, patch ){ - if( ex ){ throw ex; } + if( ex ) throw ex; + var stdoutBufs = []; var gitApply = child_process.spawn( - "sh", ["-c", "git apply"], + "git", ["apply"], { cwd: workdirOfSync(app, "platform"), }); gitApply.on("error", console.error.bind(console)); gitApply.stderr.on("data", logAsString); - gitApply.stdout.on("data", noop); + gitApply.stdout.on("data", stdoutBufs.push.bind(stdoutBufs)); gitApply.on("close", function( code, signal ){ - if( code !== 0 || signal !== null ){ throw Error(""+ code +", "+ signal 
+""); } - onDone(null, null); + if( code !== 0 || signal !== null ){ + for( var buf in stdoutBufs ){ log.write(buf.toString()); } + throw Error(""+ code +", "+ signal +""); + } + endFn(null, null); }); gitApply.stdin.write(patch); gitApply.stdin.end(); } + function endFn( ex, ret ){ + if( onDoneCalledNTimes !== 0 ){ throw Error("assert(onDoneCalledNTimes == 0)"); } + onDoneCalledNTimes += 1; + decrNumTasks(app); + onDone(ex, ret); + } + } + + + function incrNumTasks( app ){ + //if( app.numRunningTasks >= app.maxParallel ){ + // throw Error("assert(app.numRunningTasks < app.maxParallel)"); + //} + app.numRunningTasks += 1; + } + + + function decrNumTasks( app ){ + if( app.numRunningTasks <= 0 ) throw Error("assert(app.numRunningTasks > 0)"); + app.numRunningTasks -= 1; + } + + + function forEachJettyService( app, onService, onDone ){ + var iSvc = 0, services; + var isOnDoneCalled = false; + getJettyServiceNamesAsArray(app, onServicesArrived); + function onServicesArrived( ex, ret ){ + if( ex ) throw ex; + services = ret; + nextService(); + } + function nextService( ex ){ + if( ex ){ endFn(ex); return; } + var service = services[iSvc++]; + if( service === undefined ){ endFn(); return; } + onService(app, service, nextService); + } + function endFn( ex, ret ){ + if( isOnDoneCalled ){ + throw (ex) ? ex : Error("onDone MUST be called ONCE only"); + }else{ + isOnDoneCalled = true; + onDone(ex, ret); + } + } + } + + + function resetHardToDevelop( app, thingyName, onDone ){ + if( typeof onDone !== "function" ) throw Error("onDone"); + var iRemoteName = 0; + tryResetHard(iRemoteName++); + function tryResetHard( i ){ + var remoteName = app.remoteNamesToTry[i]; + if( remoteName === undefined ){ onDone(Error("no usable remote found")); return; } + log.write("[DEBUG] "+ thingyName +"$ git reset --hard "+ remoteName +"/develop\n"); + var child = child_process.spawn( + "git", ["reset", "--hard", remoteName +"/develop"], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else if( code !== 0 ){ + tryResetHard(iRemoteName++); + }else{ + deleteBranch(); + } + }); + } + function deleteBranch( ex ){ + if( ex ) throw ex; + log.write("[DEBUG] "+ thingyName +"$ git branch --delete --force "+ app.branchName +"\n"); + var child = child_process.spawn( + "git", ["branch", "--delete", "--force", app.branchName], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code == 1 ){ /* assume branch doesnt exist*/ + log.write("[INFO ] Ignore: Failed to delete branch '"+ app.branchName +"' in '" + + thingyName +"'.\n"); + endFn(null, null); + }else if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + endFn(null, null); + } + }); + } + function endFn( ex, ret ){ + onDone(ex, ret); + } } function run( app ){ - patchAwaySlimPackagingInPlatform(app, onPatchAwaySlimPackagingInPlatformDone); - function onPatchAwaySlimPackagingInPlatformDone( ex, ret ){ - if( ex ){ log.write("[WARN ] "+ ex.message +"\n"); /*throw ex;*/ } - fetchChangesFromGititForAllJettyServices(app, + if( app.isResetHardToDevelop ){ + forEachJettyService(app, resetHardToDevelop, endFn); + return; + } + updateFromRemote(); + function updateFromRemote( ex ){ + if( ex ) throw ex; 
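+        /* NOTE (editor, for reference): this run() chains its steps in
+         * continuation-passing style. The "promisify" imported near the top
+         * (apparently unused so far) could express the same flow with
+         * async/await, e.g. (hypothetical, untested sketch):
+         *
+         *     const fetchP = promisify(fetchChangesFromGitit);
+         *     for( const svc of jettyServices ) await fetchP(app, svc);
+         */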
+ forEachJettyService(app, fetchChangesFromGitit, onFetchChangesFromGititForAllJettyServicesDone); } function onFetchChangesFromGititForAllJettyServicesDone( ex ){ - if( ex ){ throw ex; } - checkoutUpstreamDevelopForAllJettyServices(app, - onCheckoutUpstreamDevelopForAllJettyServicesDone); + if( ex ) throw ex; + forEachJettyService(app, checkoutUpstreamDevelop, + onCheckoutUpstreamDevelopDone); } - function onCheckoutUpstreamDevelopForAllJettyServicesDone( ex ){ + function onCheckoutUpstreamDevelopDone( ex ){ if( ex ) throw ex; + patchAwaySlimPackagingInPlatform(app, onPatchAwaySlimPackagingInPlatformDone); + } + function onPatchAwaySlimPackagingInPlatformDone( ex, ret ){ + if( ex ) throw ex; + setVersionInPlatform(app, onSetVersionInPlatformDone); + } + function onSetVersionInPlatformDone(){ dropSlimFromAllJenkinsfiles(app, onDropSlimFromAllJenkinsfilesDone); } function onDropSlimFromAllJenkinsfilesDone( ex ){ - if( ex ){ throw ex; } + if( ex ) throw ex; + forEachJettyService(app, setPlatformVersionInService, onSetPlatformVersionInServiceDone); + } + function onSetPlatformVersionInServiceDone( ex ){ + if( ex ) throw ex; + if( app.isPush || app.isPushForce ){ + commitAllServices(app, onCommitAllServicesDone); + }else{ + log.write("[DEBUG] Skip commit/push (disabled)\n"); + endFn(); + } + } + function onCommitAllServicesDone( ex ){ + if( ex ) throw ex; + if( !app.isPush && !app.isPushForce ) throw Error("assert(isPush || isPushForce)"); + forEachJettyService(app, pushService, endFn); + } + function endFn( ex ){ + if( ex ) throw ex; log.write("[INFO ] App done\n"); } } function main(){ - const app = Object.seal({ + const app = { isHelp: false, + isPrintIsaVersion: false, + isPush: false, + isPushForce: false, + isResetHardToDevelop: false, + remoteNamesToTry: ["origin"], + platformSnapVersion: null, + workdir: "C:/work/tmp/git-scripted", maxParallel: 1, - }); + numRunningTasks: 0, + branchName: "SDCISA-15648-RemoveSlimPackaging-n1", + commitMsg: "[SDCISA-15648] Remove slim packaging", + }; + app.platformSnapVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT"; if( parseArgs(process.argv, app) !== 0 ){ process.exit(1); } if( app.isHelp ){ printHelp(); return; } run(app); -- cgit v1.1 From 37af455dff71ab08070e48f6a848e326a52902ca Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 24 Apr 2024 21:33:01 +0200 Subject: Add a standalone container_of impl. Fix assert_is template. --- doc/note/qemu/qemu.txt | 2 +- src/main/c/common/assert_is.h | 4 ++-- src/main/c/common/offset_of.h | 9 +++++++++ 3 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 src/main/c/common/offset_of.h diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 5ee13b2..395d11d 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -245,7 +245,7 @@ NOTE: Couldn't yet test any of those commands. 
&& apk add openssh-server \ && rc-update add sshd \ && sed -i -E 's;^# *(PermitRootLogin).+$;\1 yes;' /etc/ssh/sshd_config \ - && sed -i -E 's;^# *(http://dl-cdn.alpinelinux.org/alpine/v[^/]+/community)$;\1;' /etc/apk/repositories \ + && sed -i -E 's;^# *(http://dl-cdn.alpinelinux.org/alpine/v.*?/community)$;\1;' /etc/apk/repositories \ && mkdir /home/user && chown 1000:1000 /home/user && chmod 755 /home/user \ && printf 'user:x:1000:1000:user:/home/user:/bin/ash\n' >> /etc/passwd \ && printf 'user:x:1000:user\n' >> /etc/group \ diff --git a/src/main/c/common/assert_is.h b/src/main/c/common/assert_is.h index b6e3132..316bf02 100644 --- a/src/main/c/common/assert_is.h +++ b/src/main/c/common/assert_is.h @@ -2,7 +2,7 @@ #if !NDEBUG #define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\ const char*f,int l){if(p==NULL){fprintf(stderr,"assert(" STR_QUOT(T)\ -" != NULL) %s:%d\n",f,l);abort();}T*obj=p;if(!PRED){fprintf(stderr,\ +" != NULL) %s:%d\n",f,l);abort();}T*obj=p;if(!(PRED)){fprintf(stderr,\ "ssert(type is \""STR_QUOT(T)"\") %s:%d\n",f,l);abort();}return p; } #else #define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\ @@ -20,7 +20,7 @@ struct Person { }; /* instantiate a checker */ -TPL_assert_is(Person, !strcmp(obj->tYPE, "Hi, I'm a Person")); +TPL_assert_is(Person, !strcmp(obj->tYPE, "Hi, I'm a Person")) #define assert_is_Person(p) assert_is_Person(p, __FILE__, __LINE__) /* make sure magic is initialized (ALSO MAKE SURE TO PROPERLY INVALIDATE diff --git a/src/main/c/common/offset_of.h b/src/main/c/common/offset_of.h new file mode 100644 index 0000000..7d9179d --- /dev/null +++ b/src/main/c/common/offset_of.h @@ -0,0 +1,9 @@ +#ifndef INCGUARD_yisgKqALPG4lfEqb +#define INCGUARD_yisgKqALPG4lfEqb + + +#define container_of(P, T, M) \ + ((T*)( ((size_t)P) - ((size_t)((char*)&((T*)0)->M - (char*)0) ))) + + +#endif /* INCGUARD_yisgKqALPG4lfEqb */ -- cgit v1.1 From 5bcc0eef6f3097706bf83655a44d2dd94e22c8b8 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 25 Apr 2024 17:39:42 +0200 Subject: Begun ocexec.c helper. Update maven doc. Enhance slim helper script. 
--- doc/note/links/links.txt | 2 + doc/note/maven/maven.txt | 6 +- src/main/c/common/windoof.h | 17 ++ src/main/c/postshit/launch/openshift/ocexec.c | 81 ++++++ src/main/nodejs/paisa-nonslim/foo.js | 341 ++++++++++++++++---------- 5 files changed, 318 insertions(+), 129 deletions(-) create mode 100644 src/main/c/common/windoof.h create mode 100644 src/main/c/postshit/launch/openshift/ocexec.c diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 53ea66e..838cc1d 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -536,6 +536,8 @@ Links (Aka argument amplifiers) ## security - [SDCISA-4808] Security flaw due to no-security-if-no-identity policy Open - [Make ISA secure (again?)](https://wikit.post.ch/x/n984Mg) +- [platform public IP exposed wont fix](https://jira.post.ch/browse/SDCISA-6998) +- [houston public IP exposed](TODO) ## Bus Factor Faktor - [Bus factor definition wikipedia](https://en.wikipedia.org/wiki/Bus_factor) diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index 10a606a..4349551 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -4,13 +4,13 @@ Maven mvn dependency:help -Ddetail=true -mvn versions:set -DgenerateBackupPoms=false -DnewVersion= +mvn versions:set -DgenerateBackupPoms=false -DallowSnapshots=true -DnewVersion= mvn versions:set -DgenerateBackupPoms=false -DnextSnapshot -mvn versions:update-parent -DparentVersion= +mvn versions:update-parent -DgenerateBackupPoms=false -DallowDowngrade=true -DallowSnapshots=true -DforceUpdate=true -DskipResolution=true -DparentVersion=YOUR_VERSION -mvn versions:set-property -Dproperty=foo.bar -DnewVersion=gugus +mvn versions:set-property -DgenerateBackupPoms=false -DallowSnapshots=true -Dproperty=foo.bar -DnewVersion=gugus export MAVEN_OPTS="..." diff --git a/src/main/c/common/windoof.h b/src/main/c/common/windoof.h new file mode 100644 index 0000000..c156cb7 --- /dev/null +++ b/src/main/c/common/windoof.h @@ -0,0 +1,17 @@ + +#if 0 +# include +#else + + + + + + + + + + + + +#endif /*manual windoof on/off switch*/ diff --git a/src/main/c/postshit/launch/openshift/ocexec.c b/src/main/c/postshit/launch/openshift/ocexec.c new file mode 100644 index 0000000..49e68b4 --- /dev/null +++ b/src/main/c/postshit/launch/openshift/ocexec.c @@ -0,0 +1,81 @@ +/* + +SH: true \ +SH: && `# Configure` \ +SH: && CC=x86_64-w64-mingw32-cc \ +SH: && MKDIR_P="mkdir -p" \ +SH: && CFLAGS="-Wall -Werror -pedantic -O0 -g -Isrc/main/c/common -DPROJECT_VERSION=0.0.0-$(date -u +%s) -fmax-errors=1 -Wno-error=unused-variable" \ +SH: && LDFLAGS="-Wl,--gc-sections,--as-needed" \ +SH: && `# Make` \ +SH: && ${MKDIR_P:?} build/bin \ +SH: && ${CC:?} -o build/bin/ocexec ${CFLAGS:?} src/main/c/postshit/launch/openshift/ocexec.c ${LDFLAGS:?} \ +SH: && true + +*/ + +#include +#include +#include +#if __WIN32 +# include +#endif + +#define LOGERR(...) fprintf(stderr, __VA_ARGS__) +#if !NDEBUG +# define REGISTER +#else +# define REGISTER register +#endif + +#define FLG_isHelp (1<<0) + + +typedef struct App App; + + +struct App { + int flg; + char const *ocNamespace; +}; + + +static void printHelp( void ){ + printf(" \n" + " TODO write help page\n" + " \n"); +} + + +static int parseArgs( int argc, char**argv, App*app ){ + REGISTER int err; + int iArg = 1; + if( argc <= 1 ){ LOGERR("EINVAL: Luke.. 
use arguments!\n"); return-1; } +nextArg:; + char const *arg = argv[iArg++]; + if( arg == NULL ) goto verifyArgs; + if( !strcmp(arg,"--help") ){ + app->flg |= FLG_isHelp; + }else if( !strcmp(arg,"-n") || !strcmp(arg,"--namespace") ){ + arg = argv[iArg++]; + if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; } + app->ocNamespace = arg; + }else{ + LOGERR("EINVAL: %s\n", arg); + } + goto nextArg; +verifyArgs: + return 0; +} + + +int main( int argc, char**argv ){ + REGISTER int err; + App app = {0}; assert((void*)0 == NULL); + #define app (&app) + if( !parseArgs(argc, argv, app) ){ err = -1; goto endFn; } + if( app->flg & FLG_isHelp ){ printHelp(); err = 0; goto endFn; } +endFn: + return !!err; + #undef app +} + diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js index 2ce8e68..ab17a86 100644 --- a/src/main/nodejs/paisa-nonslim/foo.js +++ b/src/main/nodejs/paisa-nonslim/foo.js @@ -17,16 +17,21 @@ +" \n" +" Options:\n" +" \n" - +" --default\n" - +" Perform default action (whatever default means).\n" + +" --fetch\n" + +" Update local repos from remote.\n" +" \n" - +" --print-isaVersion\n" - +" Print a preflux isaVersion to stdout filled with the patched\n" - +" services.\n" + +" --reset-hard\n" + +" Reset worktree to develop.\n" +" \n" - +" --reset-hard-to-develop\n" - +" Resets all the services back to develop. WARN if you've uncommitted\n" - +" work in some of those repos, IT WILL BE LOST!\n" + +" --patch-platform\n" + +" Remove slim packaging from patform and set snapshot version.\n" + +" \n" + +" --patch-services\n" + +" Disable slim packaging in Jenkinsfile and use platform snapshot in\n" + +" pom.\n" + +" \n" + +" --commit\n" + +" Create a git commit with our changes.\n" +" \n" +" --push | --push-force\n" +" Create commits for patched services and push them to upstream. 
If\n" @@ -44,27 +49,32 @@ function parseArgs( argv, app ){ - var hasArgs = false; + if( argv.length <= 2 ){ + log.write("EINVAL: Refuse to produce damage with zero args.\n"); + return -1; + } for( var iA = 2 ; iA < argv.length ; ++iA ){ var arg = argv[iA]; if( arg == "--help" ){ app.isHelp = true; return 0; - }else if( arg == "--default" ){ - hasArgs = true; + }else if( arg == "--fetch" ){ + app.isFetch = true; + }else if( arg == "--reset-hard" ){ + app.isResetHard = true; + }else if( arg == "--patch-platform" ){ + app.isPatchPlatform = true; + }else if( arg == "--patch-services" ){ + app.isPatchServices = true; + }else if( arg == "--commit" ){ + app.isCommit = true; }else if( arg == "--push" ){ if( app.isPushForce ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; } app.isPush = true; - hasArgs = true; }else if( arg == "--push-force" ){ if( app.isPush ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; } app.isPushForce = true; - hasArgs = true; - }else if( arg == "--print-isaVersion" ){ - app.isPrintIsaVersion = true; - hasArgs = true; }else if( arg == "--reset-hard-to-develop" ){ app.isResetHardToDevelop = true; - hasArgs = true; //}else if( arg == "--max-parallel" ){ // arg = argv[++iA]; // if( !/^[0-9]+$/.test(arg) ){ log.write("EINVAL: --max-parallel "+ arg +"\n"); return -1; } @@ -74,10 +84,6 @@ return -1; } } - if( !hasArgs ){ - log.write("EINVAL: Refuse to produce damage with zero args.\n"); - return -1; - } return 0; } @@ -97,7 +103,7 @@ function gitUrlOfSync( app, thingyName ){ if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName); - return "https://example.com/scm/isa/"+ thingyName +".git"; + return "https://gitit.post.ch/scm/isa/"+ thingyName +".git"; } @@ -146,28 +152,36 @@ * "poms/service/paisa-service-superpom/pom.xml" as described in * SDCISA-15648 */ var patch = "" - +"tVnNcuMoEL7nKbTXzUKc2WRmQk1tZav2Ppd9AYRaEgoCCpBj79NvI0u2E9uJxUg+CJDpr4Fu+k+F" - +"LMuMkEqGjN9Z0/o7D24tBdxZLj0nw4j4zoLD/+McumlVlk+ZfSN1AZssL+9L8fj9cUXpt+JhxfFR" - +"Zver1deHhxtCyLQV3Nze3k5cxfNzRp5W9398f8puY/ste36+yT77/birDFf+rytmCqNLWXWOB2n0" - +"FQQ9kXUG1xkkIAtyFUmk8kq2fyNVyUWYQhh/3FiulORaADvq//iNkH9//vOTZRrW4LJCOhBBbbPO" - +"Q5FJnVnFQ2lcS8hEhqKm1vhAZaC9iChXMvJWvZgGce0ODxsHC6AXpuVSL4WOMhTg/VLwylSV1Iud" - +"zSvkCdDGVZR7iyrSsKF9BY6K0+uR49rLINcw6k++zbx1cRe7yX6yFp1h6MKCSotWo+Q+gEPbwYTi" - +"3rc8wMXd1TIHp3EGWePxFjwYl8RTG1DSeOrABwWBnXlHYRNolB72L66naaWWm6WWMDvbg6Iit9Zo" - +"GjWS7fq9dpIWAsdj5ZP3gwie5MB1F6Ty7OTNAkfYMxCmAMHejFKBlEKtRqfi2Zl3aaClVNBZZXjB" - +"Tl+lQUrDDt00CMV1xY4HiTA7i8nejWcFI9zK2QHZ0xNdobcCT7QJBDbSh2lMNNoHX1KoBRc1sKFN" - +"MPAj0H+tcbZmQzv3ZYlWfW866d50sjPm9FeAG77mG4qBlscDhRj5HBg0lpMv9H66PHuPZOPx0joE" - +"i+K0RoMOnvVDJbE/K2Kak76I18p2ajxx7BTxUMWLH031MCBco+LyNMP0GXZCAPQ5aPQpOWYocwBH" - +"rLC1b8HjC9J49+f96nJsVEOhjHhJuj9VDE5K6WuKoRzbqTqoaTgj1Z58+nXYUR7kz96/SLKY1Hea" - +"YpYj1yPoC3eBH71KWWUUWhQ6wm3y9HW9hUlTz1Mc2dqJ0ota0OTGezr6ln40epZFg9bI26vyoWGN" - +"UMRgFE764XKheb/b3t2wXTNRA0zBSUDbx/a9JM/YxAuuJN64obNELF4ZUymgMXqkJWpI3lWeRWOy" - +"epwGJg2tZKi7vPeFBIWOa0QkPzEoPRJ3p0gw54U9i4wHzzUocWyRa/9MzVcvAyY5/zFBOur3uRkK" - +"3wgZtksES2d4XmRzJnFL4GmQl1kw4RsqExj8DuleeoD9AWjMl9E0zYPpt+j5W+LRr3V+3nVimuuk" - +"mAkTheSIgwqDXpfqgs/hrqGWQqFi6Rdizet851pzBwUap7Ws5lzvESDezDmRY1kRE4pYQCUmj0qW" - +"iH6wq/0zEaAvsZWOt/Bq3AsbS27GflaVw8ixXYs053+B6a7ONxterNdcrtUctjHnHoRB4W4SkriP" - +"8eK3CWvcnLgp7vACGGwsOgs/ObL+ADLA1DLGB2Ao4zmxUO1TMoghYI4pyaH7SzWEMWpgn4YP0OZQ" - +"FGgom3YT4oTEEHpHTEc4doKbvJehtLUvcVlj1JcF9xND8igX4bo293RMp9k+r97Je2bQtKJa3inV" - 
+"47Yo52LLDt3Ee9w4g8kj2zXTU4xQm66qQ7wWnm4wUgDesqGd8Eny7t03ydurKd8SngBdyf74Q+qV" - +"fE3nBFw5O/4wMUer3RsAqjGhigVz+vvN/wsTxCQ=" + +"tVrdb9s2EH/PX8EZ2OosIe2kadOw7Zaia4sM3Vo0fdhDgYGSKJkOJQok7dgr+r/vqA9/NHZisrIf" + +"IpIif3e8O94HlUSkKcI4ExaxQalyMzBcT0XMByUThuGmh82k5BreuzlklksU+cw+EEXCZyhKT9L4" + +"ybMnQ0LO2fmz6On56TN0Mhw+PTs7wBj7cXBwdHTkycXlJcIXw5Pjk5PH6Mg1LtDl5QF66PdikCkm" + +"zW87zIxVkYpsopkVqthhQbWo1AoYtYIDCbzTErfKSJG/glUpi63PQvdjqmRSClbEnK60X/yE8ecP" + +"f3ygqOBTrlEiNI+tnKOJ4QkSBSols6nSOcaeBOMRKZWxRFhS6YgwKRxtWemp0VctPHhovgf0ROVM" + +"FPtCBx3G3Jh9wUuVZaLYm2xueRQArXRGmCnBRMa0ed5yBoZT2ZFmhRFWTHlrP9EcmVK7XdSTjbcV" + +"bSCo7R6NFtxGyozlGpwHjSUzJmeWb93dSERcFzADT0G8CbNKB9EsFJdCGaK5sZJbumGM8JklTnvQ" + +"3srPOBeFmO2Lhc7JLg0VqOWqIM4iad2urBPn3DIQK/PeDyAYHHFWTKyQht4Z2YMIKwKxSnhM13qh" + +"QFKCVUNQMXTDWBhoKiSflFKxhN4dCoMUii6bYRCSFRld7QTC1B6TftfvFAyzUnQOSC8uyBCiFTe4" + +"UBbzmTDWj0gB/sGkhI9iFo84bZ4BDr4F+i9XuhzR5tn1YXFefeE6ycJ10g3u9EeAx2zKZgQSLQMC" + +"5S7zWRIYlwyfkhN/fVYRqXTiJSNrS1BnqQpeWEOrrhTQ7hQxLEhvxctF7ptPrAZFEGp8Y1pX3XQw" + +"K8BwWZhjegg7IAF6GNTFlAhKlC6AHZadl+vgbgCPjX58MtyeG414IlV8E3R+MpecpMKMCKRytDZ1" + +"Lv1w2lWL5f7HoV651D/9fiDIYxIzKQhUOWLagt4wbdnKUAiXTmlO6QA3i8L5WocJM8+7OCIvPbXn" + +"rGAcKWNIG1uqXhtZ9pq0OtpGpmdjOo4lVpCF46q7v9S82m0Vbmj98LQAlTBswffRRSsoMo7dAZcC" + +"TlzT2EcunimVSU5c9khSsJBokhnqnMnwiR+YUCQTdjSJqliIQenAIyAZz6R0Rd0Tia3arOxOdNxE" + +"rsaI3ROoVn9D69XtgEHBvy2QVtpVbQbKV7Gw830kSxtobiWzoXALoKmAltpjwdfcTEDy25R74Qn2" + +"PaCuXgbX1A2mmUPkz7GBuDYx3fIJZa4WcUeYoCSNNc8g6dWhIXgT7pSPRCzBsIobXKrb7uQ6Ypon" + +"4JymIuuS3xVAOJldIrtrRSgo3AUqVpEzskD0pV+t/gYCVFdsqWY5v1X6hrZXbqp86FYOMsd8GocF" + +"/y1E63u+zvDcfc32u5rlNrrcQ6xAubOAIu5+PPdxolS6S9yQcLgFjM9KCBbGO7O+B9Jy32uMe8BA" + +"x11igdmHVBBNwuxKkmXzh+4Q2qyBPpg+8DziSQKOcpzPrJsQmELXi0kLR+/gBu+ludpaXHGVSsnT" + +"Pe7HpeROL7Ge5JEhbTlNF3V1re+OQcMu1aKJlBVuDnpO5nTZDDzHY62geKT1w7/EsCM1yUbWHQtD" + +"ZpApcJbT5unxSXLw3TfJo51Xri+8A7Qjee8PqS+MmuiYe1gaFObgtSsHQAooqNyFOfn1AO+O8JHZ" + +"EXL7ey+iqv2yGjIk47YPOxhDyAQDETIhdb2s9Bwdod4AystB7/D57pRWiBCr3gKn/UOS3wCq6Tuc" + +"3ZESnqI1lQDTS2GTtVfgZ6Ww/d6XL+bIi113PK0s+r13Wk1KCoC8pmUViji6fn/1Vw4eCvVAGmsU" + +"D31o1PJlC245OCv0FbEU4V8y68FuK5cbPgdpsJRkju2rxOmKOh5hqCVz5Xm1WIki7feaPf+Ofsbn" + +"Q9M7dsQ8dut+IkX9df24DARSVtN3YIfoqx/egj2nKYQroaE5N0CFW2RiMAqnsEcg6akAh/4IsSIB" + +"J1PO0Z+vPrlXlmkw9cqcD3ueu3E/kGtN5iXqtVR6/jDV2XN8vdUqrzVYHWar3Ju+j+XegfysAHD1" + +"+EE2peSU91siBbj7EAqDQSv7lnPg99q6wNY/dJbXasQZYM3JyoQAYTuv4UymnC8oHjfAx+jagm6Z" + +"Tl5D/0Ppsh/y6c3H969ev/n3zT9X15+v/n7nu8lviEvDOzHKQvla17fdp+84FSJZE2F2ioVtPIKd" + +"APuqBCs6O32+BNmBns9/IL0Y8BmPJw/MXJkFcfV/3mNHXg==" ; patch = Buffer.from(patch, 'base64'); patch = zlib.inflateRaw(patch, function( ex, patch ){ @@ -179,12 +193,7 @@ function getJettyServiceNamesAsArray( app, onDone ){ setImmediate(onDone, null, [ /*TODO get via args/file */ - "allitnil", "babelfish", "barman", - //"benjy", "bentstick", "blart", "captain", "caveman", - //"colin", "deep", "drdan", "guide", "heimdall", "hooli", "jeltz", "kwaltz", "lazlar", - //"loon", "magician", "megacamel", "minetti", "mown", "neutron", "nowwhat", "pobble", - //"poodoo", "prosser", "rob", "slarti", "streetmentioner", "thor", "towel", "trillian", - //"vannharl", "vogon", "vroom", "zaphake", "zem", + TODO_GX0CAJ9hAgCNRAIA9hgCAP5jAgDGCgIA ]); } @@ -235,7 +244,7 @@ } log.write("[DEBUG] "+ thingyName +"$ git add Jenkinsfile\n"); var child = child_process.spawn( - "git", ["add", "Jenkinsfile"], + "git", ["add", "--", "."], { cwd:workdirOfSync(app, thingyName) } ); child.on("error", console.error.bind(console)); @@ -262,9 +271,8 @@ child.on("exit", 
function( code, signal ){ if( code !== 0 || signal !== null ){ var stdoutStr = ""; - for( var buf in stdoutBufs ){ stdoutStr += buf.toString(); } - if( stdoutStr.length ){ log.write(stdoutStr); } - endFn(Error("code="+ code +", signal="+ signal +"")); + for( var buf in stdoutBufs ){ log.write(buf.toString()); } + endFn(Error("code="+ code +", signal="+ signal)); return; } createBranch(); return; @@ -320,14 +328,39 @@ } + function giveServiceOurSpecialVersion( app, thingyName, onDone ){ + if( typeof onDone != "function" ) throw TypeError("onDone"); + doit(); + function doit( ex ){ + if( ex ) throw ex; + var child = child_process.spawn( + "mvn", ["versions:set", "-DgenerateBackupPoms=false", "-DallowSnapshots=true", + "-DnewVersion="+ app.serviceSnapVersion], + { cwd: workdirOfSync(app, thingyName) }, + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + return; + } + onDone(); + }); + } + } + + function setPlatformVersionInService( app, thingyName, onDone ){ if( typeof onDone != "function" ) throw TypeError("onDone"); updateParent(); function updateParent(){ - log.write("[DEBUG] "+ thingyName +" - Set platform version "+ app.platformSnapVersion +"\n"); + log.write("[DEBUG] "+ thingyName +" - Set platform version "+ app.parentVersion +"\n"); var child = child_process.spawn( - "mvn", ["versions:update-parent", "-DparentVersion="+ app.platformSnapVersion], - { cwd: workdirOfSync(app, jettyService) }, + "mvn", ["versions:update-parent", "-DgenerateBackupPoms=false", "-DallowDowngrade=true", + "-DallowSnapshots=true", "-DforceUpdate=true", "-DskipResolution=true", + "-DparentVersion="+app.parentVersion], + { cwd: workdirOfSync(app, thingyName) }, ); child.on("error", console.error.bind(console)); child.stderr.on("data", logAsString); @@ -341,10 +374,11 @@ } function updateProperty( ex ){ if( ex ) throw ex; - log.write("[DEBUG] "+ thingyName +" - Set parent.version "+ app.platformSnapVersion +"\n"); + log.write("[DEBUG] "+ thingyName +" - Set parent.version "+ app.parentVersion +"\n"); var child = child_process.spawn( - "mvn", ["versions:set-property", "-Dproperty=parent.version", "-DnewVersion="+ app.platformSnapVersion], - { cwd: workdirOfSync(app, jettyService) }, + "mvn", ["versions:set-property", "-DgenerateBackupPoms=false", "-DallowSnapshots=true", + "-Dproperty=platform.version", "-DnewVersion="+ app.parentVersion], + { cwd: workdirOfSync(app, thingyName) }, ); child.on("error", console.error.bind(console)); child.stderr.on("data", logAsString); @@ -527,8 +561,10 @@ } endFn(null, null); }); - gitApply.stdin.write(patch); - gitApply.stdin.end(); + setTimeout/*TODO why?*/(function(){ + gitApply.stdin.write(patch); + gitApply.stdin.end(); + }, 42); } function endFn( ex, ret ){ if( onDoneCalledNTimes !== 0 ){ throw Error("assert(onDoneCalledNTimes == 0)"); } @@ -553,38 +589,57 @@ } - function forEachJettyService( app, onService, onDone ){ - var iSvc = 0, services; + function forEachInArrayDo( app, array, onService, onDone ){ + var iE = 0; var isOnDoneCalled = false; - getJettyServiceNamesAsArray(app, onServicesArrived); - function onServicesArrived( ex, ret ){ - if( ex ) throw ex; - services = ret; - nextService(); - } - function nextService( ex ){ + nextElem(); + function nextElem( ex ){ if( ex ){ endFn(ex); return; } - var service = services[iSvc++]; - if( service === undefined ){ endFn(); return; } - 
onService(app, service, nextService); + if( iE >= array.length ){ endFn(); return; } + onService(app, array[iE++], nextElem); } - function endFn( ex, ret ){ + function endFn( ex ){ if( isOnDoneCalled ){ throw (ex) ? ex : Error("onDone MUST be called ONCE only"); }else{ isOnDoneCalled = true; - onDone(ex, ret); + onDone(ex); } } } + function forEachJettyService( app, onService, onDone ){ + getJettyServiceNamesAsArray(app, onServicesArrived); + function onServicesArrived( ex, services ){ + if( ex ) throw ex; + forEachInArrayDo(app, services, onService, onDone); + } + } + + function resetHardToDevelop( app, thingyName, onDone ){ - if( typeof onDone !== "function" ) throw Error("onDone"); var iRemoteName = 0; - tryResetHard(iRemoteName++); - function tryResetHard( i ){ - var remoteName = app.remoteNamesToTry[i]; + if( typeof onDone !== "function" ) throw Error("onDone"); + detach(); + function detach(){ + log.write("[DEBUG] "+ thingyName +"$ git checkout --detach\n"); + var child = child_process.spawn( + "git", ["checkout", "--detach"], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); + }else{ + tryResetHard(); + } + }); + } + function tryResetHard(){ + var remoteName = app.remoteNamesToTry[iRemoteName++]; if( remoteName === undefined ){ onDone(Error("no usable remote found")); return; } log.write("[DEBUG] "+ thingyName +"$ git reset --hard "+ remoteName +"/develop\n"); var child = child_process.spawn( @@ -597,7 +652,23 @@ if( signal !== null ){ onDone(Error("code "+ code +", signal "+ signal)); }else if( code !== 0 ){ - tryResetHard(iRemoteName++); + tryResetHard(); /*try next remoteName*/ + }else{ + wipeWorktree(); + } + }); + } + function wipeWorktree(){ + log.write("[DEBUG] "+ thingyName +"$ git rimraf\n"); + var child = child_process.spawn( + "git", ["rimraf"/*TODO make portable*/], + { cwd:workdirOfSync(app, thingyName) } + ); + child.on("error", console.error.bind(console)); + child.stderr.on("data", logAsString); + child.on("close", function( code, signal ){ + if( code !== 0 || signal !== null ){ + onDone(Error("code "+ code +", signal "+ signal)); }else{ deleteBranch(); } @@ -630,54 +701,65 @@ } - function run( app ){ - if( app.isResetHardToDevelop ){ - forEachJettyService(app, resetHardToDevelop, endFn); - return; - } - updateFromRemote(); - function updateFromRemote( ex ){ - if( ex ) throw ex; - forEachJettyService(app, fetchChangesFromGitit, - onFetchChangesFromGititForAllJettyServicesDone); - } - function onFetchChangesFromGititForAllJettyServicesDone( ex ){ - if( ex ) throw ex; - forEachJettyService(app, checkoutUpstreamDevelop, - onCheckoutUpstreamDevelopDone); - } - function onCheckoutUpstreamDevelopDone( ex ){ - if( ex ) throw ex; - patchAwaySlimPackagingInPlatform(app, onPatchAwaySlimPackagingInPlatformDone); - } - function onPatchAwaySlimPackagingInPlatformDone( ex, ret ){ + function setPlatformVersionInAllServices( app, onDone ){ + forEachJettyService(app, setPlatformVersionInService, onDone); + } + + + function fetchRemoteChanges( app, onDone ){ + var platformAndServices = app.services.slice(0); + platformAndServices.unshift("platform"); + forEachInArrayDo(app, platformAndServices, fetchChangesFromGitit, onDone); + } + + + function fetchListOfServices( app, onDone ){ + getJettyServiceNamesAsArray(app, function( ex, ret ){ if( ex ) throw ex; - 
setVersionInPlatform(app, onSetVersionInPlatformDone); - } - function onSetVersionInPlatformDone(){ - dropSlimFromAllJenkinsfiles(app, onDropSlimFromAllJenkinsfilesDone); + forEachInArrayDo(app, app.services, checkoutUpstreamDevelop, onDone); + }); + actions.push(function( app, onDone ){ + forEachInArrayDo(app, app.services, resetHardToDevelop, onDone); + }); } - function onDropSlimFromAllJenkinsfilesDone( ex ){ - if( ex ) throw ex; - forEachJettyService(app, setPlatformVersionInService, onSetPlatformVersionInServiceDone); + if( app.isPatchPlatform ){ + actions.push(patchAwaySlimPackagingInPlatform); + actions.push(setVersionInPlatform); } - function onSetPlatformVersionInServiceDone( ex ){ - if( ex ) throw ex; - if( app.isPush || app.isPushForce ){ - commitAllServices(app, onCommitAllServicesDone); - }else{ - log.write("[DEBUG] Skip commit/push (disabled)\n"); - endFn(); - } + if( app.isPatchServices ){ + actions.push(dropSlimFromAllJenkinsfiles); + actions.push(function( app, onDone ){ + forEachInArrayDo(app, app.services, giveServiceOurSpecialVersion, onDone); + }); } - function onCommitAllServicesDone( ex ){ - if( ex ) throw ex; - if( !app.isPush && !app.isPushForce ) throw Error("assert(isPush || isPushForce)"); - forEachJettyService(app, pushService, endFn); + if( app.isCommit ) actions.push(function( app, onDone ){ + forEachInArrayDo(app, app.services, commitService, onDone); + }); + if( app.isPush || app.isPushForce ){ + actions.push(function( app, onDone ){ + forEachJettyService(app, pushService, onDone); + }); } - function endFn( ex ){ - if( ex ) throw ex; + actions.push(function( app, onDone ){ log.write("[INFO ] App done\n"); + }); + triggerNextAction(); + function triggerNextAction( ex ){ + if( ex ) throw ex; + var action = actions.shift(); + if( action === undefined ){ endFn(); return; } + action(app, triggerNextAction); } } @@ -685,19 +767,26 @@ function main(){ const app = { isHelp: false, - isPrintIsaVersion: false, + isFetch: false, + isResetHard: false, + isPatchPlatform: false, + isPatchServices: false, + isCommit: false, isPush: false, isPushForce: false, isResetHardToDevelop: false, remoteNamesToTry: ["origin"], - platformSnapVersion: null, workdir: "C:/work/tmp/git-scripted", maxParallel: 1, numRunningTasks: 0, + services: null, branchName: "SDCISA-15648-RemoveSlimPackaging-n1", commitMsg: "[SDCISA-15648] Remove slim packaging", + platformSnapVersion: "0.0.0-SNAPSHOT", + serviceSnapVersion: "0.0.0-SNAPSHOT", + parentVersion: null, }; - app.platformSnapVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT"; + app.parentVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT"; if( parseArgs(process.argv, app) !== 0 ){ process.exit(1); } if( app.isHelp ){ printHelp(); return; } run(app); -- cgit v1.1 From 913b1c23fd9c61a44a92ad23f8af3e9c5bf810cf Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 24 Apr 2024 21:33:01 +0200 Subject: dump --- src/main/c/common/windoof.h | 42 +++++++++++++++ src/main/c/postshit/launch/openshift/ocexec.c | 75 ++++++++++++++++++++++++++- src/main/nodejs/paisa-nonslim/foo.js | 6 +++ 3 files changed, 121 insertions(+), 2 deletions(-) diff --git a/src/main/c/common/windoof.h b/src/main/c/common/windoof.h index c156cb7..6ed9b41 100644 --- a/src/main/c/common/windoof.h +++ b/src/main/c/common/windoof.h @@
-3,13 +3,55 @@
 #  include <windows.h>
 #else
+#include <stdint.h>
+//#define HANDLE void*
+//typedef int BOOL;
+//typedef unsigned long LPDWORD;
+typedef struct _PROCESS_INFORMATION {
+    void* hProcess;
+    void* hThread;
+    uint32_t dwProcessId;
+    uint32_t dwThreadId;
+} PROCESS_INFORMATION, *PPROCESS_INFORMATION, *LPPROCESS_INFORMATION;
+typedef struct _SECURITY_ATTRIBUTES {
+    uint32_t nLength;
+    void* lpSecurityDescriptor;
+    int bInheritHandle;
+} SECURITY_ATTRIBUTES, *PSECURITY_ATTRIBUTES, *LPSECURITY_ATTRIBUTES;
+typedef struct _STARTUPINFOA {
+    uint32_t cb;
+    char *lpReserved;
+    char *lpDesktop;
+    char *lpTitle;
+    uint32_t dwX;
+    uint32_t dwY;
+    uint32_t dwXSize;
+    uint32_t dwYSize;
+    uint32_t dwXCountChars;
+    uint32_t dwYCountChars;
+    uint32_t dwFillAttribute;
+    uint32_t dwFlags;
+    short wShowWindow;
+    short cbReserved2;
+    uint8_t *lpReserved2; /* byte pointer (LPBYTE) */
+    void *hStdInput, *hStdOutput, *hStdError;
+} STARTUPINFOA, *LPSTARTUPINFOA;
+
+
+
+int CreateProcessA( char const*, char*, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, int, uint32_t,
+    void*, char const*, LPSTARTUPINFOA, LPPROCESS_INFORMATION );
+
+
+int GetExitCodeProcess(void*, unsigned long*);
+
diff --git a/src/main/c/postshit/launch/openshift/ocexec.c b/src/main/c/postshit/launch/openshift/ocexec.c
index 49e68b4..45c4af9 100644
--- a/src/main/c/postshit/launch/openshift/ocexec.c
+++ b/src/main/c/postshit/launch/openshift/ocexec.c
@@ -23,8 +23,10 @@ SH: && true
 #define LOGERR(...) fprintf(stderr, __VA_ARGS__)
 #if !NDEBUG
 # define REGISTER
+# define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
 #else
 # define REGISTER register
+# define LOGDBG(...)
 #endif
 
 #define FLG_isHelp (1<<0)
@@ -36,6 +38,7 @@ typedef struct App App;
 struct App {
     int flg;
     char const *ocNamespace;
+    char const *podName;
 };
 
 
@@ -55,12 +58,20 @@ nextArg:;
     if( arg == NULL ) goto verifyArgs;
     if( !strcmp(arg,"--help") ){
         app->flg |= FLG_isHelp;
+        //LOGDBG("[DEBUG] help -> true\n", arg);
+        return 0;
     }else if( !strcmp(arg,"-n") || !strcmp(arg,"--namespace") ){
         arg = argv[iArg++];
         if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
        app->ocNamespace = arg;
+        //LOGDBG("[DEBUG] namespace -> \"%s\"\n", arg);
+    }else if( !strcmp(arg,"-p") || !strcmp(arg,"--pod") ){
+        arg = argv[iArg++];
+        if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
+        app->podName = arg;
+        //LOGDBG("[DEBUG] pod -> \"%s\"\n", arg);
     }else{
-        LOGERR("EINVAL: %s\n", arg);
+        LOGERR("EINVAL: %s\n", arg); return -1;
     }
     goto nextArg;
 verifyArgs:
@@ -68,12 +79,72 @@ verifyArgs:
 }
+
+
+static int fetchPodnames( App*app ){
+    assert(!"TODO_hCICALJrAgDwNgIAZ0ACAD9sAgB5UwIA");
+    return -1;
+}
+
+
+static int resolvePodname( App*app ){
+    REGISTER int err;
+    err = fetchPodnames(app);
+    if( err ) return err;
+    if( !strcmp(app->podName, "houston") ){
+    }
+    return 0; /* TODO podname matching still WIP */
+}
+
+
+static int resolveNamespace( App*app ){
+    if(0){
+    }else if( !strcmp(app->ocNamespace,"test") ){
+        app->ocNamespace = "isa-houston-test";
+    }else if( !strcmp(app->ocNamespace,"int") ){
+        app->ocNamespace = "isa-houston-int";
+    }else if( !strcmp(app->ocNamespace,"preprod") ){
+        app->ocNamespace = "isa-houston-preprod";
+    }else{
+        LOGDBG("[DEBUG] Use oc namespace as provided: \"%s\"\n", app->ocNamespace);
+    }
+    return 0;
+}
+
+
+static int run( App*app ){
+    REGISTER int err;
+    err = resolveNamespace(app); if( err ) return err;
+    err = resolvePodname(app); if( err ) return err;
+
+    LOGDBG("ENOTSUP: TODO continue here %s:%d\n", __FILE__, __LINE__);
+
+    STARTUPINFOA startInfo = {0}; /* WIP stub, not populated yet */
+    char *cmdline = NULL; /* WIP stub, cmdline not built yet */
+    PROCESS_INFORMATION proc;
+    err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL,
        &startInfo, &proc);
+    if( err == 0 ){
+        LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+        err = -1; goto endFn; }
+    err = WaitForSingleObject(proc.hProcess, INFINITE);
+    if( err != WAIT_OBJECT_0 ){
+        LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+        err = -1; goto endFn; }
+    long unsigned exitCode;
+    err = GetExitCodeProcess(proc.hProcess, &exitCode);
+    if( err == 0 ){
+        LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+        err = -1; goto endFn; }
+    if( (exitCode & 0x7FFFFFFF) != exitCode ){
+        LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+        err = -1; goto endFn;
+    }
+    err = (int)exitCode; /* propagate child exit code */
+endFn:
+    return err;
+}
+
+
 int main( int argc, char**argv ){
     REGISTER int err;
     App app = {0}; assert((void*)0 == NULL);
     #define app (&app)
-    if( !parseArgs(argc, argv, app) ){ err = -1; goto endFn; }
+    if( parseArgs(argc, argv, app) ){ err = -1; goto endFn; }
+    LOGDBG("[DEBUG] flags are 0x%X\n", app->flg);
     if( app->flg & FLG_isHelp ){ printHelp(); err = 0; goto endFn; }
+    err = run(app);
 endFn:
     return !!err;
 #undef app
diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js
index ab17a86..8408497 100644
--- a/src/main/nodejs/paisa-nonslim/foo.js
+++ b/src/main/nodejs/paisa-nonslim/foo.js
@@ -1,3 +1,9 @@
+/*
+
+Related:
+- [Remove Slim Packaging](SDCISA-15648)
+
+*/
 ;(function(){ "use strict";
 
     const child_process = require("child_process");
--
cgit v1.1

From 96b36646255a2cbc932f879a6bd29ee4500e05e3 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Sun, 28 Apr 2024 12:56:37 +0200
Subject: Add some notes about ff ram cache and burn an ISO.

---
 doc/note/burncdrom/growisofs.txt | 20 ++++++++++++++++++++
 doc/note/mount/fstab.txt         | 13 +++++++++++++
 2 files changed, 33 insertions(+)
 create mode 100644 doc/note/burncdrom/growisofs.txt
 create mode 100644 doc/note/mount/fstab.txt

diff --git a/doc/note/burncdrom/growisofs.txt b/doc/note/burncdrom/growisofs.txt
new file mode 100644
index 0000000..f9b0ad5
--- /dev/null
+++ b/doc/note/burncdrom/growisofs.txt
@@ -0,0 +1,20 @@
+
+## Install
+
+apt install genisoimage
+
+
+## Burn from ISO to optical media
+
+growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
+
+
+## Get checksum of disc
+
+- Use isoinfo (from genisoimage pkg) to get size params.
+- Use dd parameterized by previous output.
+
+    isoinfo -d dev=/dev/srX
+    dd bs=${Logical block size} count=${Volume size} if=/dev/srX | md5sum -b
+
+
diff --git a/doc/note/mount/fstab.txt b/doc/note/mount/fstab.txt
new file mode 100644
index 0000000..de642ce
--- /dev/null
+++ b/doc/note/mount/fstab.txt
@@ -0,0 +1,13 @@
+
+fstab
+=====
+
+## Moving firefox cache to RAM
+
+Effect: Faster at runtime, slower at startup.
+
+fstab entry:
+none /home/YOURNAME/.cache/mozilla/firefox tmpfs noatime,noexec,users 0 0
+
+
+
--
cgit v1.1

From daefa32665c77e78e7708c19095846c3d725b717 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Sun, 28 Apr 2024 17:03:18 +0200
Subject: Add hint how to create bootable USB from ISO.

---
 doc/note/burncdrom/burncdrom.txt | 28 ++++++++++++++++++++++++++++
 doc/note/burncdrom/growisofs.txt | 20 --------------------
 doc/note/compress/xz.txt         |  5 +++++
 3 files changed, 33 insertions(+), 20 deletions(-)
 create mode 100644 doc/note/burncdrom/burncdrom.txt
 delete mode 100644 doc/note/burncdrom/growisofs.txt
 create mode 100644 doc/note/compress/xz.txt

diff --git a/doc/note/burncdrom/burncdrom.txt b/doc/note/burncdrom/burncdrom.txt
new file mode 100644
index 0000000..1b98f6a
--- /dev/null
+++ b/doc/note/burncdrom/burncdrom.txt
@@ -0,0 +1,28 @@
+
+How to handle CD/DVD burning
+============================
+
+## Install growisofs
+
+    apt install genisoimage
+
+
+## Burn from ISO to optical media
+
+growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
+
+
+## Get checksum of disc
+
+- Use isoinfo (from genisoimage pkg) to get size params.
+- Use dd parameterized by previous output.
+
+    isoinfo -d dev=/dev/srX
+    dd bs=${Logical block size} count=${Volume size} if=/dev/srX | md5sum -b
+
+
+## Create bootable USB from ISO
+
+    dd bs=4M if=path/to/my.iso of=/dev/sdX status=progress oflag=sync
+
+
diff --git a/doc/note/burncdrom/growisofs.txt b/doc/note/burncdrom/growisofs.txt
deleted file mode 100644
index f9b0ad5..0000000
--- a/doc/note/burncdrom/growisofs.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-
-## Install
-
-apt install genisoimage
-
-
-## Burn from ISO to optical media
-
-growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
-
-
-## Get checksum of disc
-
-- Use isoinfo (from genisoimage pkg) to get size params.
-- Use dd parameterized by previous output.
-
-    isoinfo -d dev=/dev/srX
-    dd bs=${Logical block size} count=${Volume size} if=/dev/srX | md5sum -b
-
-
diff --git a/doc/note/compress/xz.txt b/doc/note/compress/xz.txt
new file mode 100644
index 0000000..b7dff5f
--- /dev/null
+++ b/doc/note/compress/xz.txt
@@ -0,0 +1,5 @@
+
+
+    xz --keep --lzma2=preset=9,nice=273,dict=1G -vv
+
+
--
cgit v1.1

From f1df3c587c6da6aff011243bdb01c8076416b36e Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Sun, 28 Apr 2024 17:55:13 +0200
Subject: Update "How to create bootable ISO USB".

---
 doc/note/burncdrom/burncdrom.txt | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/doc/note/burncdrom/burncdrom.txt b/doc/note/burncdrom/burncdrom.txt
index 1b98f6a..4c6f163 100644
--- a/doc/note/burncdrom/burncdrom.txt
+++ b/doc/note/burncdrom/burncdrom.txt
@@ -7,11 +7,16 @@ How to handle CD/DVD burning
     apt install genisoimage
 
 
-## Burn from ISO to optical media
+## Burn to optical media from ISO
 
 growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
 
 
+## "Burn" to USB from ISO
+
+    dd bs=4M if=path/to/my.iso of=/dev/sdX status=progress oflag=sync
+
+
 ## Get checksum of disc
 
 - Use isoinfo (from genisoimage pkg) to get size params.
@@ -21,8 +26,13 @@ growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
     dd bs=${Logical block size} count=${Volume size} if=/dev/srX | md5sum -b
 
 
-## Create bootable USB from ISO
+## Get checksum of usb drive
 
-    dd bs=4M if=path/to/my.iso of=/dev/sdX status=progress oflag=sync
+- Use stat to get the ISO size in bytes.
+- Divide that size by the drive block size (likely 4096) to get the dd count.
+- Use dd parameterized by previous output.
+
+    stat -c '%s' my.iso
+    dd bs=${Drive block size} count=${ISO size / block size} if=/dev/sdX | md5sum -b
--
cgit v1.1

From b8db811a29c4e36e8c160a02bb43ab88e3e39ebe Mon Sep 17 00:00:00 2001
From: andreas tux-book
Date: Mon, 29 Apr 2024 00:41:42 +0200
Subject: Add notes about tux-six debian upgrade.

---
 doc/note/setup-debian/etc-environment  |  18 ++++++
 doc/note/setup-debian/setup-debian.txt | 108 +++++++++++++++++++++++++++++++++
 doc/note/ssh/ssh-usage.txt             |   5 ++
 3 files changed, 131 insertions(+)
 create mode 100644 doc/note/setup-debian/etc-environment
 create mode 100644 doc/note/setup-debian/setup-debian.txt

diff --git a/doc/note/setup-debian/etc-environment b/doc/note/setup-debian/etc-environment
new file mode 100644
index 0000000..1a67ae1
--- /dev/null
+++ b/doc/note/setup-debian/etc-environment
@@ -0,0 +1,18 @@
+
+# Originally copy-pasted from latitude-E6530
+
+# Need that duplicate here because openbox is too stupid to load its own env
+# file.
+
+# Configure UI colors
+# .. for GTK-2
+#GTK2_RC_FILES=/usr/share/themes/Adwaita-dark/gtk-2.0/gtkrc
+# .. for GTK-3
+#GTK_THEME=Adwaita:dark
+# .. Qt 5 (needs 'qt5-style-plugins' to be installed)
+#QT_QPA_PLATFORMTHEME=gtk2
+
+# Disable QT DPI scaling. Can be really helpful for eg vlc in a multi-monitor
+# setup.
+#QT_AUTO_SCREEN_SCALE_FACTOR=0
+
diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt
new file mode 100644
index 0000000..a79cca3
--- /dev/null
+++ b/doc/note/setup-debian/setup-debian.txt
@@ -0,0 +1,108 @@
+
+How to Setup Debian for MY needs
+================================
+
+## Partitioning
+
+Use the smaller & faster SSD for the system. Make sure it consists of TWO
+partitions, so the system can easily be upgraded later by just placing a new
+install into the other partition.
+
+I prefer to put the users' home on a larger drive (maybe an HDD) for data
+storage. If there's only ONE drive, we need an additional partition for the
+home too.
+
+
+## Base system
+
+Install the base system through Debian's ISO installers. Usually the net
+installer is fine.
+
+Do NOT install ANY additional bloat! Eg: untick ALL extra software like
+desktops etc. Even disable the standard-system-utilities option.
+
+
+## Install core tools
+
+ALWAYS use '--no-install-recommends' when installing something, to prevent
+useless bloat from being installed.
+
+    && apt install -y --no-install-recommends vim net-tools openssh-server openssh-client bash bash-completion
+
+
+## Setup firewall
+
+WARN: Does NOT set up the effective rules. Rules need to be filled in by the admin.
+
+WARN: This snippet may cut off network connections, including your remote shell!
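+
+HINT: To reduce the lockout risk when applying rules over ssh, a scheduled
+rollback can help. This is only a sketch; the two minute timer and the plain
+flush are assumptions, adjust them to the actual setup:
+
+    ( sleep 120 && $SUDO iptables -F && $SUDO ip6tables -F ) &
+    `# ... now apply the new rules and verify the session survives ...`
+    kill %1 `# cancel the scheduled rollback once all is fine`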
+
+    && $SUDO apt install -y --no-install-recommends iptables iptables-persistent \
+    && printf '# TODO add contents here\n' | $SUDO tee /etc/iptables/src-default >/dev/null \
+    && printf '\n[WARN ] Needs more setup: /etc/iptables/src-default\n\n' \
+    && printf '%s\n' \
+        '## Apply from file' '' \
+        'ncat /etc/iptables/src-default | $SUDO iptables-restore' '' \
+        '## Store current session as default' '' \
+        '$SUDO iptables-save | $SUDO tee /etc/iptables/rules.v4 > /dev/null' \
+        | $SUDO tee /etc/iptables/README >/dev/null \
+    && printf '# TODO setup file contents\n' | $SUDO tee /etc/iptables/src-default4 >/dev/null \
+    && printf '%s\n' \
+        '*filter' '' \
+        '# Loopback' \
+        '-A INPUT -i lo -j ACCEPT' \
+        '-A OUTPUT -o lo -j ACCEPT' '' \
+        '# Log blocked connection attempts' \
+        '-A INPUT -j LOG --log-prefix "Fw6BadInn: " --log-level 6' \
+        '-A FORWARD -j LOG --log-prefix "Fw6BadFwd: " --log-level 6' \
+        '-A OUTPUT -j LOG --log-prefix "Fw6BadOut: " --log-level 6' '' \
+        '# Disallow any non-whitelisted packets' \
+        '-A INPUT -j DROP' \
+        '-A FORWARD -j REJECT' \
+        '-A OUTPUT -j REJECT' '' \
+        'COMMIT' | $SUDO tee /etc/iptables/src-default6 >/dev/null \
+    && printf '%s\n' \
+        '*filter' \
+        '-A INPUT -j ACCEPT' \
+        '-A FORWARD -j ACCEPT' \
+        '-A OUTPUT -j ACCEPT' \
+        'COMMIT' | $SUDO tee /etc/iptables/src-allowAll4 >/dev/null \
+    && $SUDO touch /etc/iptables/src-tmp \
+
+
+## Mount home partition
+
+# /etc/fstab
+UUID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx /mnt/nameOfHdd ext4 noatime 0 2
+/mnt/nameOfHdd/home /home none bind 0 0
+
+
+## Configure Locale
+
+- In "/etc/locale.gen" enable all of:
+  "de_CH.UTF-8 UTF-8", "de_CH ISO-8859-1", "en_DK.UTF-8 UTF-8", "en_DK ISO-8859-1".
+- Run "locale-gen".
+- Check list with "locale -a".
+- Change "/etc/default/locale" contents to:
+  LANG=en_DK.UTF-8
+  LANGUAGE="en_US:en"
+
+
+## Install Desktop Env
+
+    && $SUDO apt install -y --no-install-recommends xorg openbox mate-terminal lightdm light-locker feh scrot lxpanel qalculate-gtk gmrun gnome-system-monitor \
+    && mkdir ~/.config ~/.config/openbox || true \
+    && update-alternatives \
+
+Populate "/etc/environment" as described by "./etc-environment".
+
+
+## Install daily-use tools
+
+    && apt install -y --no-install-recommends vim htop pv openssh-client iptables iptables-persistence \
+       nginx vlc qemu-utils qemu-system keepassxc gpg firefox thunderbird gnome-themes-extra \
+       file-roller zip unzip xz-utils p7zip-full alsamixer pulseaudio pavucontrol audacity eom \
+       darktable gimp \
+
+
+
+
+
diff --git a/doc/note/ssh/ssh-usage.txt b/doc/note/ssh/ssh-usage.txt
index f9bc2a8..ff76f4c 100644
--- a/doc/note/ssh/ssh-usage.txt
+++ b/doc/note/ssh/ssh-usage.txt
@@ -30,6 +30,11 @@ request to "localhost:7080" on HOST.
 
     ssh -o 'ProxyCommand ncat -p12345 %h %p' MY_SERVER
 
+## Get rid of bullshit warnings
+
+    -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null
+
+
 ## Run Cmd via jumphost
 
 Source: "https://www.cyberciti.biz/faq/linux-unix-ssh-proxycommand-passing-through-one-host-gateway-server/"
--
cgit v1.1

From ca4810c86a6f4d3d9133b26458cc6972afd44325 Mon Sep 17 00:00:00 2001
From: andreas tux-book
Date: Mon, 29 Apr 2024 03:06:59 +0200
Subject: Add more packages to install.

--- doc/note/qemu/qemu.txt | 2 +- doc/note/setup-debian/setup-debian.txt | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 395d11d..23984a9 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -194,7 +194,7 @@ true `# SMB client debian` \ ### Add those in "/etc/fstab" to setup mount automatically at boot: ### HINT: mkdir /home/user/build - //10.0.2.2/sharename /mnt/sharename cifs password=,uid=1000,gid=1000,user 0 0 + //10.0.2.2/sharename /mnt/sharename cifs password=,uid=1000,gid=1000,user,vers=3.0 0 0 /home/user/build /mnt/sharename/build none bind 0 0 List smb shares (eg debugging) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index a79cca3..90c35b1 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -88,7 +88,7 @@ UUID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx /mnt/nameOfHdd ext4 noatime 0 2 ## Install Desktop Env - && $SUDO apt install -y --no-install-recommends xorg openbox mate-terminal lightdm light-locker feh scrot lxpanel qalculate-gtk gmrun gnome-system-monitor \ + && $SUDO apt install -y --no-install-recommends xorg openbox mate-terminal lightdm light-locker feh scrot lxpanel qalculate-gtk gmrun gnome-system-monitor vim-gtk3 \ && mkdir ~/.config ~/.config/openbox || true \ && update-alternatives \ @@ -98,11 +98,13 @@ Populate "/etc/environment" as described by "./etc-environment". ## Install daily-use tools && apt install -y --no-install-recommends vim htop pv openssh-client iptables iptables-persistence \ - nginx vlc qemu-utils qemu-system keepassxc gpg firefox thunderbird gnome-themes-extra \ - file-roller zip unzip xz-utils p7zip-full alsamixer pulseaudio pavucontrol audacity eom \ - darktable gimp \ - - + nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox chromium thunderbird \ + gnome-themes-extra file-roller zip unzip xz-utils p7zip-full alsamixer pulseaudio pavucontrol \ + audacity eom darktable gimp git tigervnc-viewer samba wireshark file evince \ + libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \ + fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \ + fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ + fonts-sil-gentium-basic \ -- cgit v1.1 From 12be50cc306ccf46e38a17a820d2c3ca313dab27 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 29 Apr 2024 13:18:38 +0200 Subject: Add more deb pkgs to install. --- doc/note/nginx/nginx.txt | 5 +++-- doc/note/setup-debian/setup-debian.txt | 10 +++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/note/nginx/nginx.txt b/doc/note/nginx/nginx.txt index 0550e7f..75f2431 100644 --- a/doc/note/nginx/nginx.txt +++ b/doc/note/nginx/nginx.txt @@ -12,8 +12,10 @@ # - set "server_name" to meaningful value. # #daemon off; # run in foreground (eg from cli) + #user www-data; + #worker_processes auto; + pid /run/nginx.pid; events {} - pid /var/run/nginx.pid; http { access_log /dev/stdout; # Directories nginx needs configured to start up. @@ -37,7 +39,6 @@ return 200 "Example says hi"; } } - } diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index 90c35b1..a6ff067 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -97,14 +97,14 @@ Populate "/etc/environment" as described by "./etc-environment". 
## Install daily-use tools - && apt install -y --no-install-recommends vim htop pv openssh-client iptables iptables-persistence \ - nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox chromium thunderbird \ - gnome-themes-extra file-roller zip unzip xz-utils p7zip-full alsamixer pulseaudio pavucontrol \ - audacity eom darktable gimp git tigervnc-viewer samba wireshark file evince \ + && $SUDO apt install -y --no-install-recommends vim htop pv openssh-client iptables \ + iptables-persistent nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox-esr \ + chromium thunderbird gnome-themes-extra file-roller zip unzip xz-utils p7zip-full \ + pulseaudio pavucontrol audacity eom darktable gimp git tigervnc-viewer samba wireshark file \ + evince lame flac opus-tools pdftk-java \ libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \ fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \ fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ fonts-sil-gentium-basic \ - -- cgit v1.1 From 37d0a8ba8bb4cee538b5cf7ff2c7686d203f7a02 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Mon, 29 Apr 2024 18:38:17 +0200 Subject: Add more toBeInstalled deb pkgs. --- doc/note/setup-debian/setup-debian.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index a6ff067..261a27b 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -101,7 +101,7 @@ Populate "/etc/environment" as described by "./etc-environment". iptables-persistent nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox-esr \ chromium thunderbird gnome-themes-extra file-roller zip unzip xz-utils p7zip-full \ pulseaudio pavucontrol audacity eom darktable gimp git tigervnc-viewer samba wireshark file \ - evince lame flac opus-tools pdftk-java \ + evince lame flac opus-tools pdftk-java sqlite3 manpages-dev gdb ffmpeg \ libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \ fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \ fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ -- cgit v1.1 From e6cfa987f8b946c0ad1998e3561115f138aef54e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 30 Apr 2024 15:40:45 +0200 Subject: Doc how to get mvn version. Make noslim helper print isaVersion. --- doc/note/links/links.txt | 4 ++ doc/note/maven/maven.txt | 2 + src/main/nodejs/paisa-nonslim/foo.js | 97 ++++++++++++++++++++++++++++++++++-- 3 files changed, 100 insertions(+), 3 deletions(-) diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt index 838cc1d..25dedde 100644 --- a/doc/note/links/links.txt +++ b/doc/note/links/links.txt @@ -590,3 +590,7 @@ Links (Aka argument amplifiers) ## (TODO put to a better place in here) - [how to handle vertx promise fail/errors properly](https://github.com/swisspost/vertx-redisques/pull/164#discussion_r1562105007) +- [null VS empty](https://jira.post.ch/browse/SDCISA-14534) + + + diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt index 4349551..a9fd248 100644 --- a/doc/note/maven/maven.txt +++ b/doc/note/maven/maven.txt @@ -14,6 +14,8 @@ mvn versions:set-property -DgenerateBackupPoms=false -DallowSnapshots=true -Dpro export MAVEN_OPTS="..." 
+## Get project version without any other bullshit
+mvn help:evaluate -o -q -DforceStdout -Dexpression=project.version && echo
 
 ## Deploy paisa snapshot
 mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://artifactory.tools.pnet.ch/artifactory/libs-snapshot-local
diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js
index 8408497..3d4f3e8 100644
--- a/src/main/nodejs/paisa-nonslim/foo.js
+++ b/src/main/nodejs/paisa-nonslim/foo.js
@@ -12,6 +12,7 @@ Related:
     const zlib = require("zlib");
     const noop = function(){};
     const log = process.stderr;
+    const out = process.stdout;
     const logAsString = function( buf ){ log.write(buf.toString()); };
 
     setImmediate(main);
@@ -45,6 +46,9 @@ Related:
         +"      remote git repo). The force variant will replace existing branches\n"
         +"      on the remote. If given multiple times, less-invasive wins.\n"
         +"      \n"
+        +"    --print-isa-version\n"
+        +"      Prints an isaVersion JSON that can be fed to preflux.\n"
+        +"      \n"
         // not impl yet
         //+"    --max-parallel \n"
         //+"      How many tasks to run concurrently. Defaults to 1. Which means to\n"
@@ -79,8 +83,8 @@
         }else if( arg == "--push-force" ){
             if( app.isPush ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; }
             app.isPushForce = true;
-        }else if( arg == "--reset-hard-to-develop" ){
-            app.isResetHardToDevelop = true;
+        }else if( arg == "--print-isa-version" ){
+            app.isPrintIsaVersion = true;
         //}else if( arg == "--max-parallel" ){
         //    arg = argv[++iA];
         //    if( !/^[0-9]+$/.test(arg) ){ log.write("EINVAL: --max-parallel "+ arg +"\n"); return -1; }
@@ -204,6 +208,90 @@
     }
 
 
+    function getVersionByServiceName(app, svcName, onDone){
+        /* if we did patch services, we already know the version without
+         * lookup. This is a performance optimization, because maven performs
+         * absolutely terrible. Performance DOES matter! */
+        //if( app.isPatchServices ){
+            setImmediate(onDone, null, app.jenkinsSnapVersion);
+        //}else{
+        //    wasteOurTimeBecausePerformanceDoesNotMatter();
+        //}
+        //function wasteOurTimeBecausePerformanceDoesNotMatter( ex ){
+        //    if( ex ) throw ex;
+        //    var stdoutBufs = [];
+        //    /* SHOULD start maven with low prio to not kill windoof. But I
+        //     * guess spawning a process with other prio is YAGNI, and so we're
+        //     * now fucked. Therefore I wish you happy time-wasting, as the only
+        //     * option left is to NOT start too many maven childs
+        //     * simultaneously.
*/ + // var child = child_process.spawn( + // "mvn", ["help:evaluate", "-o", "-q", "-DforceStdout", "-Dexpression=project.version"], + // { cwd:workdirOfSync(app, svcName) } + // ); + // child.on("error", console.error.bind(console)); + // child.stderr.on("data", logAsString); + // child.stdout.on("data", stdoutBufs.push.bind(stdoutBufs)); + // child.on("close", function( code, signal ){ + // if( code !== 0 || signal !== null ){ + // endFn(Error("code="+ code +", signal="+ signal +"")); + // return; + // } + // if( stdoutBufs.length <= 0 ) throw Error("maven has failed"); + // var version = stdoutBufs.join().trim(); + // onDone(null, version); + // }); + //} + } + + + function printIsaVersion( app, onDone ){ + var iSvcQuery = 0, iSvcPrinted = 0; + printIntro(); + function printIntro( ex ){ + if( ex ) throw ex; + var epochMs = Date.now(); + out.write('{\n'); + out.write(' "timestamp": "'+ new Date().toISOString() +'",\n'); + out.write(' "isaVersionId": "SDCISA-15648-'+ epochMs +'",\n'); + out.write(' "isaVersionName": "SDCISA-15648-'+ epochMs +'",\n'); + out.write(' "trial": true,\n'); + out.write(' "services": [\n'); + out.write(' { "name": "eagle", "version": "02.23.01.00" },\n'); + out.write(' { "name": "storage", "version": "00.25.00.02" },\n'); + out.write(' { "name": "platform", "version": "'+ app.platformJenkinsVersion +'" }'); + /* maven performance is an absolute terrible monster. + * Problem 1: Doing this sequentially takes forever. + * Problem 2: Doing this parallel for all makes windoof freeze. + * Workaround: Do at most a few of them in parallel. */ + for( var i = 3 ; i ; --i ) nextService(); + } + function nextService( ex ){ + if( ex ) throw ex; + if( iSvcQuery >= app.services.length ){ /*printTail();*/ return; } + var svcName = app.services[iSvcQuery++]; + getVersionByServiceName(app, svcName, function(e,r){ printService(e,r,svcName); }); + } + function printService( ex, svcVersion, svcName ){ + if( ex ) throw ex; + if( typeof svcVersion != "string") throw Error(svcVersion); + iSvcPrinted += 1; + out.write(",\n "); + out.write('{ "name": "'+ svcName +'", "version": "'+ svcVersion +'" }'); + if( iSvcPrinted >= app.services.length ){ printTail(); }else{ nextService(); } + } + function printTail( ex ){ + if( ex ) throw ex; + out.write('\n'); + out.write(' ],\n'); + out.write(' "featureSwitches": [],\n'); + out.write(' "mergedBundles": []\n'); + out.write('}\n'); + onDone(/*ex*/null, /*ret*/null); + } + } + + function pushService( app, thingyName, onDone ){ if( typeof onDone != "function" ){ throw TypeError("onDone"); } var iRemoteNameToTry = 0; @@ -757,6 +845,7 @@ Related: forEachJettyService(app, pushService, onDone); }); } + if( app.isPrintIsaVersion ){ actions.push(printIsaVersion); } actions.push(function( app, onDone ){ log.write("[INFO ] App done\n"); }); @@ -780,7 +869,7 @@ Related: iscommit: false, isPush: false, isPushForce: false, - isResetHardToDevelop: false, + isPrintIsaVersion: false, remoteNamesToTry: ["origin"], workdir: "C:/work/tmp/git-scripted", maxParallel: 1, @@ -790,6 +879,8 @@ Related: commitMsg: "[SDCISA-15648] Remove slim packaging", platformSnapVersion: "0.0.0-SNAPSHOT", serviceSnapVersion: "0.0.0-SNAPSHOT", + platformJenkinsVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT", + jenkinsSnapVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT", parentVersion: null, }; app.parentVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT"; -- cgit v1.1 From 401bf00c23fd4befcc99157ffd606f6b56ba1588 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser 
hiddenalpha.ch Date: Wed, 1 May 2024 10:53:48 +0200 Subject: Fix houston patch --- src/main/patch/houston/default.patch | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch index be226b3..b5b7639 100644 --- a/src/main/patch/houston/default.patch +++ b/src/main/patch/houston/default.patch @@ -2,7 +2,7 @@ Custom houston patch to have a "usable" service at all. Patch based on "develop" aka - "125344e940ebc090183bad7fc096938289f15e3f" from "2024-01-16". + "497a9477c9e2100130f9a29ec130c1131220c935" from "2024-04-22". --- a/pom.xml @@ -50,9 +50,7 @@ --- a/houston-process/pom.xml +++ b/houston-process/pom.xml -@@ -25,6 +25,26 @@ - - +@@ -27,3 +27,23 @@ + + org.slf4j @@ -76,26 +74,36 @@ + - ch.post.it.paisa.houston + + +--- a/houston-process/pom.xml ++++ b/houston-process/pom.xml +@@ -212,6 +232,2 @@ + +- +- org.apache.logging.log4j +- log4j-slf4j2-impl +- + --- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java +++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -@@ -68,6 +68,9 @@ public class Deployer { +@@ -98,6 +98,9 @@ public class Deployer { private static final Logger LOGGER = LoggerFactory.getLogger(Deployer.class); public static void main(String[] args) throws Exception { + boolean isAssertIsEnabled = false; + assert isAssertIsEnabled = true; + if (!isAssertIsEnabled) throw new UnsupportedOperationException("Enable assertions to fix this problem -> https://stackoverflow.com/a/68893479/4415884"); - setStartupProperties(); - Props.prepare(); + throwIfLoggerAmbiguous(); + configureObjectMapper(); --- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java +++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java -@@ -378,7 +378,7 @@ public class Deployer { +@@ -471,7 +477,7 @@ public class Deployer { // All other queues (typically to backend services) with a slow-down pattern after // failed delivery qc.add( -- cgit v1.1 From d504a9d296031b2c7593a404d07bc6190242568e Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Fri, 3 May 2024 19:54:04 +0200 Subject: In NoSlim helper, handle empty array. 
---
 doc/note/links/links.txt             |  1 +
 src/main/nodejs/paisa-nonslim/foo.js | 15 ++++++++++++---
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt
index 25dedde..049363c 100644
--- a/doc/note/links/links.txt
+++ b/doc/note/links/links.txt
@@ -571,6 +571,7 @@ Links (Aka argument amplifiers)
 
 ## Angular is terrible
 - [Why angular sucks](https://medium.com/dirtyjs/why-angular-2-4-5-6-sucks-afb36567ad68)
+- [JS bloat everywhere](https://tonsky.me/blog/js-bloat/)
 
 ## java try-with-resources behavior
 [Exception Scenarios for Java's try-with-resources](https://dev.to/moaxcp/exception-scenarios-for-java-s-try-with-resources-63m)
diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js
index 3d4f3e8..43cf8aa 100644
--- a/src/main/nodejs/paisa-nonslim/foo.js
+++ b/src/main/nodejs/paisa-nonslim/foo.js
@@ -519,9 +519,18 @@ Related:
             );
             child.on("error", console.error.bind(console));
             child.stderr.on("data", logAsString);
-            child.on("close", function(){
-                nextJettyService();
-            });
+            child.on("close", removeEmptyArray);
+        }
+        /* Pipeline is too dumb for an empty array */
+        function removeEmptyArray( ex ){
+            if( ex ) throw ex;
+            var child = child_process.spawn(
+                "sed", [ "-i", "-E", "s_^(.*?)\\.buildMaven\\(\\[\\]\\)(.*?)$_\\1\\2_", "Jenkinsfile" ],
+                { cwd: workdirOfSync(app, jettyService) },
+            );
+            child.on("error", console.error.bind(console));
+            child.stderr.on("data", logAsString);
+            child.on("close", nextJettyService);
         }
         function onNoMoreJettyServices(){
             onDone(null, null);
--
cgit v1.1

From baea71706d4977519414e1b5049631908f22a5c2 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Sat, 4 May 2024 11:58:40 +0200
Subject: (setup) add xxd pkg.

---
 doc/note/setup-debian/setup-debian.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt
index 261a27b..bd3ed3b 100644
--- a/doc/note/setup-debian/setup-debian.txt
+++ b/doc/note/setup-debian/setup-debian.txt
@@ -101,7 +101,7 @@ Populate "/etc/environment" as described by "./etc-environment".
    iptables-persistent nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox-esr \
    chromium thunderbird gnome-themes-extra file-roller zip unzip xz-utils p7zip-full \
    pulseaudio pavucontrol audacity eom darktable gimp git tigervnc-viewer samba wireshark file \
-   evince lame flac opus-tools pdftk-java sqlite3 manpages-dev gdb ffmpeg \
+   evince lame flac opus-tools pdftk-java sqlite3 xxd manpages-dev gdb ffmpeg \
    libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \
    fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \
    fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \
    fonts-sil-gentium-basic \
--
cgit v1.1

From 61ce26b5f7371b2ef7e34e62054bef67af59c2a8 Mon Sep 17 00:00:00 2001
From: Andreas Fankhauser hiddenalpha.ch
Date: Sat, 4 May 2024 18:47:34 +0200
Subject: (setup) put pkgs into groups.

-## Install daily-use tools - - && $SUDO apt install -y --no-install-recommends vim htop pv openssh-client iptables \ - iptables-persistent nginx-light vlc qemu-utils qemu-system keepassxc gpg firefox-esr \ - chromium thunderbird gnome-themes-extra file-roller zip unzip xz-utils p7zip-full \ - pulseaudio pavucontrol audacity eom darktable gimp git tigervnc-viewer samba wireshark file \ - evince lame flac opus-tools pdftk-java sqlite3 xxd manpages-dev gdb ffmpeg \ +## Install whatever needed + + && $SUDO apt install -y --no-install-recommends \ + `# basic CLI` \ + vim htop pv openssh-client iptables iptables-persistent xxd zip unzip xz-utils p7zip-full \ + file \ + `# basic UI` \ + firefox-esr file-roller thunderbird chromium evince \ + `# software devel` \ + git sqlite3 manpages-dev gdb qemu-utils qemu-system wireshark samba tigervnc-viewer \ + `# server` \ + nginx-light \ + `# multimedia` \ + pulseaudio pavucontrol vlc audacity eom darktable gimp lame flac opus-tools ffmpeg \ + `# encryption` \ + keepassxc gpg \ + `# UI customization` \ + gnome-themes-extra \ + `# Office Suite` \ libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \ fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \ fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ - fonts-sil-gentium-basic \ + fonts-sil-gentium-basic pdftk-java \ + `# Others` \ + lm-sensors fancontrol \ -- cgit v1.1 From e305be2d8a7471b542224496363104262954284d Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 5 May 2024 11:13:56 +0200 Subject: (setup) Add hint for nvidia/mesa --- doc/note/setup-debian/setup-debian.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index fd21e74..96aed99 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -120,5 +120,7 @@ Populate "/etc/environment" as described by "./etc-environment". fonts-sil-gentium-basic pdftk-java \ `# Others` \ lm-sensors fancontrol \ + `# Nvidia graphics (open)` \ + mesa-utils \ -- cgit v1.1 From 44e08eb032a8227fc8c17ca1dada3ea2f6f94507 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 5 May 2024 13:43:34 +0200 Subject: (setup) Add some notes about mesa drivers. --- doc/note/setup-debian/setup-debian.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index 96aed99..b24a70f 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -100,7 +100,7 @@ Populate "/etc/environment" as described by "./etc-environment". && $SUDO apt install -y --no-install-recommends \ `# basic CLI` \ vim htop pv openssh-client iptables iptables-persistent xxd zip unzip xz-utils p7zip-full \ - file \ + file trash-cli \ `# basic UI` \ firefox-esr file-roller thunderbird chromium evince \ `# software devel` \ @@ -121,6 +121,8 @@ Populate "/etc/environment" as described by "./etc-environment". `# Others` \ lm-sensors fancontrol \ `# Nvidia graphics (open)` \ - mesa-utils \ + mesa-utils clinfo mesa-opencl-icd \ + `# Nvidia graphics (non-free, DoesNotWorkYet)` \ + nvidia-detect nvidia-tesla-470-driver linux-headers-amd64 \ -- cgit v1.1 From 5bf183e567f0cd822f9cfb118c041f400f04cea5 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 5 May 2024 18:12:06 +0200 Subject: Add ISO pkgs. 
Note how to connect isolated qemuVM to host cifs/samba share. --- doc/note/qemu/qemu.txt | 3 +++ doc/note/setup-debian/setup-debian.txt | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 23984a9..7d46d6a 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -54,6 +54,9 @@ qemu-system-x86_64 \ `# Isolated Network plus host port/cmd reachable from guest` \ -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \ -device e1000,netdev=n1 \ + `# Isolated Network with samba access to host` \ + -netdev 'user,id=n2,ipv6=off,restrict=y,guestfwd=tcp:10.0.2.9:139-cmd:ncat 127.0.0.1 139,guestfwd=tcp:10.0.2.9:445-cmd:ncat 127.0.0.1 445' \ + -device e1000,netdev=n2 \ `# 10.0.2.x network with host redirect` \ -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \ -device e1000,netdev=n0 \ diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index b24a70f..b3c71b6 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -100,7 +100,7 @@ Populate "/etc/environment" as described by "./etc-environment". && $SUDO apt install -y --no-install-recommends \ `# basic CLI` \ vim htop pv openssh-client iptables iptables-persistent xxd zip unzip xz-utils p7zip-full \ - file trash-cli \ + file trash-cli genisoimage ncat \ `# basic UI` \ firefox-esr file-roller thunderbird chromium evince \ `# software devel` \ -- cgit v1.1 From 25cc2cf42fe2b0f8af65451bf2eb9196015e4851 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Sun, 5 May 2024 23:03:25 +0200 Subject: Add curl pkg --- doc/note/setup-debian/setup-debian.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index b3c71b6..435d6f0 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -100,7 +100,7 @@ Populate "/etc/environment" as described by "./etc-environment". && $SUDO apt install -y --no-install-recommends \ `# basic CLI` \ vim htop pv openssh-client iptables iptables-persistent xxd zip unzip xz-utils p7zip-full \ - file trash-cli genisoimage ncat \ + file trash-cli genisoimage ncat curl \ `# basic UI` \ firefox-esr file-roller thunderbird chromium evince \ `# software devel` \ -- cgit v1.1 From 09a11168d261ef5e5d43738db3fd511a7babddc1 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Tue, 7 May 2024 20:33:01 +0200 Subject: (setup) Add avahi daemon pkg. --- doc/note/setup-debian/setup-debian.txt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index 435d6f0..ed7794f 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -106,7 +106,9 @@ Populate "/etc/environment" as described by "./etc-environment". `# software devel` \ git sqlite3 manpages-dev gdb qemu-utils qemu-system wireshark samba tigervnc-viewer \ `# server` \ - nginx-light \ + nginx-light avahi-daemon \ + `# mDNS client & tools` \ + libnss-mdns avahi-utils \ `# multimedia` \ pulseaudio pavucontrol vlc audacity eom darktable gimp lame flac opus-tools ffmpeg \ `# encryption` \ -- cgit v1.1 From 8e29d34ac66df8e8da36c6a13d4b1e83b2209f42 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 8 May 2024 18:00:44 +0200 Subject: (setup) Add 'bc' pkg. 
--- doc/note/qemu/qemu.txt | 2 +- doc/note/setup-debian/setup-debian.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt index 7d46d6a..edaf9bc 100644 --- a/doc/note/qemu/qemu.txt +++ b/doc/note/qemu/qemu.txt @@ -174,7 +174,7 @@ Regular boot true `# SMB server debian` \ && hostpath=/path/to/host/dir \ && sharename=work \ - && apt install --no-install-recommends -y samba + && apt install --no-install-recommends -y samba \ && printf '[%s]\npath = %s\npublic = no\nwriteable = yes\nguest ok = yes\nforce user = andreas\n' "${sharename:?}" "${hostpath:?}" | $SUDO tee -a /etc/samba/smb.conf >/dev/null \ && $SUDO /etc/init.d/smbd restart \ && true diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index ed7794f..ac38bbd 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -121,7 +121,7 @@ Populate "/etc/environment" as described by "./etc-environment". fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ fonts-sil-gentium-basic pdftk-java \ `# Others` \ - lm-sensors fancontrol \ + lm-sensors fancontrol bc \ `# Nvidia graphics (open)` \ mesa-utils clinfo mesa-opencl-icd \ `# Nvidia graphics (non-free, DoesNotWorkYet)` \ -- cgit v1.1 From a5386000f39c9bedb0cfe6924d33ff5c04cfd815 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Wed, 8 May 2024 23:03:05 +0200 Subject: Add some doc for ELF and PE32 dependency scanning. --- doc/note/binutils/dumpbin.txt | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/doc/note/binutils/dumpbin.txt b/doc/note/binutils/dumpbin.txt index 638cf8f..0b08077 100644 --- a/doc/note/binutils/dumpbin.txt +++ b/doc/note/binutils/dumpbin.txt @@ -2,17 +2,23 @@ DumpBin For Windoof =================== -## Analyze PE32 / PE32+ files. +Scan for unwanted dependencies +------------------------------ -TODO: This is unusable, because this only works with lots of bloat installed. +Linux: -Location: "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\dumpbin.exe" + find build/bin -type f -print0|xargs -0I? sh -c 'echo ?;readelf -d ?|egrep NEEDED|egrep -v "\\[(libc\\.|libz\\.|libm\\.|libdl\.|libpthread\.)"' -Help - dumpbin /? +Windoof: -List needed DLLs. + find build/bin/*.exe -print0|xargs -0I? sh -c 'echo ?;objdump -p ?|egrep -i DLL\ Name:|egrep -iv "(KERNEL32.dll|msvcrt.dll|USER32.dll|WS2_32.dll)"' - dumpbin /DEPENDENTS foo.exe + + + +List Dll Dependencies Of A PE32 Executable +------------------------------------------ + + x86_64-w64-mingw32-objdump -p out.exe | grep 'DLL Name:' -- cgit v1.1 From 14c4f42c5a3cbdff8d09723bb353a30e5359df3f Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 9 May 2024 16:18:30 +0200 Subject: Add some more libs to scanner. --- doc/note/binutils/dumpbin.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/note/binutils/dumpbin.txt b/doc/note/binutils/dumpbin.txt index 0b08077..d71878b 100644 --- a/doc/note/binutils/dumpbin.txt +++ b/doc/note/binutils/dumpbin.txt @@ -12,7 +12,7 @@ Linux: Windoof: - find build/bin/*.exe -print0|xargs -0I? sh -c 'echo ?;objdump -p ?|egrep -i DLL\ Name:|egrep -iv "(KERNEL32.dll|msvcrt.dll|USER32.dll|WS2_32.dll)"' + find build/bin/*.exe -print0|xargs -0I? 
sh -c 'echo ?;objdump -p ?|egrep -i DLL\ Name:|egrep -iv "(KERNEL32.dll|msvcrt.dll|USER32.dll|WS2_32.dll|ADVAPI32.dll|GDI32.dll|IMM32.dll|ole32.dll|OLEAUT32.dll|SETUPAPI.dll|SHELL32.dll|VERSION.dll|WINMM.dll)"' -- cgit v1.1 From 95d934e8e3918832c03f05b2fc32cb5d5272cb83 Mon Sep 17 00:00:00 2001 From: Andreas Fankhauser hiddenalpha.ch Date: Thu, 9 May 2024 16:33:31 +0200 Subject: (setup) add rsync to setup. --- doc/note/setup-debian/setup-debian.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt index ac38bbd..b36945e 100644 --- a/doc/note/setup-debian/setup-debian.txt +++ b/doc/note/setup-debian/setup-debian.txt @@ -121,7 +121,7 @@ Populate "/etc/environment" as described by "./etc-environment". fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \ fonts-sil-gentium-basic pdftk-java \ `# Others` \ - lm-sensors fancontrol bc \ + lm-sensors fancontrol bc rsync \ `# Nvidia graphics (open)` \ mesa-utils clinfo mesa-opencl-icd \ `# Nvidia graphics (non-free, DoesNotWorkYet)` \ -- cgit v1.1