summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/main/c/PcapOne/PcapOne.c311
-rw-r--r--src/main/c/common/commonKludge.h16
-rw-r--r--src/main/c/foo/Asn1Digger.c416
-rw-r--r--src/main/c/foo/PemCodec.c218
-rw-r--r--src/main/firefox/gaga-plugin/main.js197
-rw-r--r--src/main/firefox/gaga-plugin/manifest.json18
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/json/JsonUtils.java8
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteChunkOStream.java24
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountInputStream.java21
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountOutputStream.java20
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CRLFtoLFOutputStream.java25
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyInputStream.java5
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyOutputStream.java14
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ConcatInputStream.java12
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/EmptyGzipInputStream.java7
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseInputStream.java14
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseOutputStream.java31
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/paisa/DomainValidation.java8
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/shell/ShellUtils.java11
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java24
-rw-r--r--src/main/lua/maven/FindLatestPaisaArtifacts.lua254
-rw-r--r--src/main/lua/maven/MvnCentralDepScan.lua1074
-rw-r--r--src/main/lua/misc/Asn1Digger.lua148
-rw-r--r--src/main/nodejs/ResClone/resclone.js18
-rw-r--r--src/main/patch/houston/default-20230203.patch52
-rw-r--r--src/main/patch/houston/default-20230214.patch56
-rw-r--r--src/main/patch/houston/default-20230331.patch56
-rw-r--r--src/main/patch/houston/default-20230503.patch56
-rw-r--r--src/main/patch/houston/default.patch74
-rwxr-xr-xsrc/main/shell/BackupByRsync/backup.sh18
-rw-r--r--src/test/java/ch/hiddenalpha/unspecifiedgarbage/crypto/Foo.java60
-rw-r--r--src/test/java/ch/hiddenalpha/unspecifiedgarbage/format/FormatUtilsTest.java113
32 files changed, 2860 insertions, 519 deletions
diff --git a/src/main/c/PcapOne/PcapOne.c b/src/main/c/PcapOne/PcapOne.c
new file mode 100644
index 0000000..2eb9e25
--- /dev/null
+++ b/src/main/c/PcapOne/PcapOne.c
@@ -0,0 +1,311 @@
+/* TODO fix this bullshit */
+typedef unsigned u_int;
+typedef unsigned short u_short;
+typedef unsigned char u_char;
+#include <pcap/pcap.h>
+/* endOf TODO */
+
+
+/* System */
+#include <assert.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
/* Path sentinel used to request reading the dump from stdin. Compared by
 * POINTER identity in run(), not by string content. */
static char const*const DEV_STDIN = "/dev/stdin";

/* Bit flags kept in PcapOne.flg */
#define FLG_isHelp (1<<0)   /* --help requested on cmdline */
#define FLG_isTcpPsh (1<<3) /* NOTE(review): cleared per packet but never set anywhere visible -- confirm */
#define FLG_isTcpRst (1<<4) /* NOTE(review): cleared per packet but never set anywhere visible -- confirm */
#define FLG_isTcpSyn (1<<5) /* NOTE(review): cleared per packet but never set anywhere visible -- confirm */
#define FLG_isTcpFin (1<<6) /* NOTE(review): cleared per packet but never set anywhere visible -- confirm */
#define FLG_isHttpReq (1<<7)  /* current segment carries (part of) an HTTP request */
#define FLG_isLlLinux (1<<12) /* NOTE(review): never referenced in this file -- confirm still needed */
#define FLG_isHdrPrinted (1<<13) /* CSV header line already written to stdout */
#define FLG_INIT (0)  /* initial flag state */

typedef struct PcapOne PcapOne;


/* Application state: cmdline options plus the parse results of the frame
 * currently being dispatched (filled layer-by-layer in onPcapPkg()). */
struct PcapOne {
    uint_least16_t flg;        /* bitset of the FLG_* values above */
    const char *dumpFilePath;  /* pcap file to read, or the DEV_STDIN sentinel */
    char *pcapErrbuf;          /* caller-provided, PCAP_ERRBUF_SIZE bytes */
    pcap_t *pcap;              /* libpcap handle; non-NULL only while run() is active */
    unsigned long frameNr;     /* 1-based count of frames seen so far */
    struct/*most recent frame*/{
        int llProto;   /* EtherType, eg 0x0800 for IPv4 */
        int llHdrEnd;  /* offset where the link-layer header ends */
    };
    struct/*most recent packet*/{
        int netProto;     /* IP protocol number, eg 0x06 for TCP */
        int netBodyLen;   /* IP payload length in bytes */
        int netHdrEnd;    /* offset where the IP header ends */
        int_fast32_t netTotLen;  /* IP "total length" header field */
        uint_least32_t ipSrcAddr, ipDstAddr;  /* IPv4 addresses in host byte order */
    };
    struct/*most recent segment*/{
        int trspBodyLen;  /* TCP payload length in bytes */
        int trspSrcPort, trspDstPort;
        int trspHdrEnd;   /* offset where the TCP header ends */
    };
    struct/*most recent http request*/{
        const uint8_t *httpReqHeadline;  /* points into libpcap's buffer; valid only during the callback */
        int httpReqHeadline_len;
        int httpReq_off; /* pkg offset from begin of most recent request */
    };
};
+
+
+/*BEG func fwd decl*/
+static void parse_ll_LINUX_SLL( PcapOne*, const struct pcap_pkthdr*, const u_char* );
+static void parse_net_IPv4( PcapOne*, const struct pcap_pkthdr*, const u_char* );
+static void parse_trsp_TCP( PcapOne*, const struct pcap_pkthdr*, const u_char* );
+static void parse_appl_HTTP_req( PcapOne*, const struct pcap_pkthdr*, const u_char* );
+static void printParsingResults( PcapOne*, const struct pcap_pkthdr* );
+/*END func fwd decl*/
+
/* Writes the usage text to stdout. The program name is derived from
 * __FILE__: prepending "/" guarantees a slash exists, so strrchr always
 * finds one and yields the text after the last separator. */
static void printHelp(){
    #define STRQUOT_21a9ffbe344c0792ed88688d6c676359(s) #s
    #define STRQUOT(s) STRQUOT_21a9ffbe344c0792ed88688d6c676359(s)
    const char *progName = strrchr("/"__FILE__, '/') + 1;
    printf("%s%s%s", " \n"
        " ", progName, " " STRQUOT(PROJECT_VERSION) "\n"
        " \n"
        " Options:\n"
        " \n"
        " --pcap-stdin\n"
        " Like --pcap but reading from stdin.\n"
        " \n"
        " --pcap <path>\n"
        " Pcap file to operate on. Compressed files are NOT supported.\n"
        " \n");
    #undef STRQUOT_21a9ffbe344c0792ed88688d6c676359
    #undef STRQUOT
}
+
+
+static int parseArgs( PcapOne*app, int argc, char**argv ){
+ app->flg = FLG_INIT;
+ app->dumpFilePath = NULL;
+ for( int iA = 1 ; iA < argc ; ++iA ){
+ const char *arg = argv[iA];
+ if(0){
+ }else if( !strcmp(arg,"--help") ){
+ app->flg |= FLG_isHelp; return 0;
+ }else if( !strcmp(arg,"--pcap") ){
+ arg = argv[++iA];
+ if( arg == NULL ){ fprintf(stderr, "EINVAL --pcap needs value\n"); return -1; }
+ app->dumpFilePath = arg;
+ }else if( !strcmp(arg,"--pcap-stdin") ){
+ app->dumpFilePath = DEV_STDIN;
+ }else{
+ fprintf(stderr, "EINVAL: %s\n", arg); return -1;
+ }
+ }
+ if( app->dumpFilePath == NULL ){
+ fprintf(stderr, "EINVAL Arg missing: --pcap <path>\n"); return -1; }
+ return 0;
+}
+
+
/* libpcap per-packet callback: parses one captured frame layer by layer
 * (link -> network -> transport -> application) and prints any recognized
 * HTTP request begin as a CSV record.
 * 'user' is the PcapOne instance passed through pcap_dispatch(). */
static void onPcapPkg( u_char*user, const struct pcap_pkthdr*hdr, const u_char*buf ){
    PcapOne *const app = (void*)user;

    /* prepare for this new packet */
    app->frameNr += 1;
    app->flg &= ~(FLG_isTcpPsh | FLG_isTcpRst | FLG_isTcpSyn | FLG_isTcpFin | FLG_isHttpReq);

    /* data-link layer */
    switch( pcap_datalink(app->pcap) ){
        case 0x71: parse_ll_LINUX_SLL(app, hdr, buf); break; /* 0x71 == DLT_LINUX_SLL (Linux cooked capture) */
        /* NOTE(review): assert(!fprintf(..)) always fires in debug builds
         * (fprintf returns nonzero). With NDEBUG the assert vanishes and the
         * STALE llProto of the previous frame is reused -- confirm intent. */
        default: assert(!fprintf(stderr,"pcap_datalink() -> 0x%02X\n", pcap_datalink(app->pcap)));
    }

    /* network layer */
    switch( app->llProto ){
        case 0x0800: parse_net_IPv4(app, hdr, buf); break; /* EtherType IPv4 */
        default: printf("???, proto=0x%04X, network-layer\n", app->llProto); return;
    }

    /* transport layer */
    switch( app->netProto ){
        case 0x06: parse_trsp_TCP(app, hdr, buf); break; /* IP protocol 6 == TCP */
        default: printf("???, proto=0x%02X, transport-layer\n", app->netProto); return;
    }

    assert(app->trspBodyLen >= 0);

    /* application layer, towards server: these dst ports are assumed HTTP */
    switch( app->trspDstPort ){
        case 80: parse_appl_HTTP_req(app, hdr, buf); break;
        case 7012: parse_appl_HTTP_req(app, hdr, buf); break;
        case 8080: parse_appl_HTTP_req(app, hdr, buf); break;
    }

    printParsingResults(app, hdr);
}
+
+
+static void parse_ll_LINUX_SLL( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
+ assert(hdr->caplen >= 15);
+ app->llProto = buf[14]<<8 | buf[15];
+ app->llHdrEnd = 16;
+}
+
+
+static void parse_net_IPv4( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
+ assert(hdr->caplen >= app->llHdrEnd+19 && "TODO_775afde7f19010220e9df8d5e2924c3e");
+ int_fast8_t netHdrLen = (buf[app->llHdrEnd+0] & 0x0F) * 4;
+ app->netTotLen = buf[app->llHdrEnd+2] << 8 | buf[app->llHdrEnd+3];
+ app->netProto = buf[app->llHdrEnd+9];
+ app->ipSrcAddr = 0
+ | ((uint_least32_t)buf[app->llHdrEnd+12]) << 24
+ | ((uint_least32_t)buf[app->llHdrEnd+13]) << 16
+ | buf[app->llHdrEnd+14] << 8
+ | buf[app->llHdrEnd+15] ;
+ app->ipDstAddr = 0
+ | ((uint_least32_t)buf[app->llHdrEnd+16]) << 24
+ | ((uint_least32_t)buf[app->llHdrEnd+17]) << 16
+ | buf[app->llHdrEnd+18] << 8
+ | buf[app->llHdrEnd+19] ;
+ app->netHdrEnd = app->llHdrEnd + netHdrLen;
+ app->netBodyLen = app->netTotLen - netHdrLen;
+}
+
+
+static void parse_trsp_TCP( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
+ assert(hdr->caplen >= app->netHdrEnd+12 && "TODO_058d5f41043d383e1ba2c492d0db4b6a");
+ app->trspSrcPort = buf[app->netHdrEnd+0] << 8 | buf[app->netHdrEnd+1];
+ app->trspDstPort = buf[app->netHdrEnd+2] << 8 | buf[app->netHdrEnd+3];
+ int tcpHdrLen = (buf[app->netHdrEnd+12] >> 4) * 4;
+ app->trspHdrEnd = app->netHdrEnd + tcpHdrLen;
+ app->trspBodyLen = app->netBodyLen - tcpHdrLen;
+}
+
+
+static void parse_appl_HTTP_req( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
+ app->flg |= FLG_isHttpReq;
+ app->httpReqHeadline = buf + app->trspHdrEnd;
+ app->httpReqHeadline_len = 0;
+ for(;; ++app->httpReqHeadline_len ){
+ if( (app->trspHdrEnd + app->httpReqHeadline_len) > hdr->caplen ) break;
+ if( app->httpReqHeadline[app->httpReqHeadline_len] == '\r' ) break;
+ if( app->httpReqHeadline[app->httpReqHeadline_len] == '\n' ) break;
+ }
+ /* TODO improve, as now its like a guess only */
+ int isNewRequest = 0
+ | !memcmp(buf + app->trspHdrEnd, "GET ", 4)
+ | !memcmp(buf + app->trspHdrEnd, "PUT ", 4)
+ | !memcmp(buf + app->trspHdrEnd, "POST ", 5)
+ | !memcmp(buf + app->trspHdrEnd, "DELETE ", 7)
+ ;
+ if( isNewRequest ){
+ app->httpReq_off = 0;
+ }else{
+ app->httpReq_off = 42; /*TODO make more accurate*/
+ }
+}
+
+
+static void printParsingResults( PcapOne*app, const struct pcap_pkthdr*hdr ){
+
+ int isHttpRequest = (app->flg & FLG_isHttpReq);
+ int isHttpReqBegin = isHttpRequest && app->httpReq_off == 0;
+
+ if( isHttpRequest && isHttpReqBegin ){
+ /* find http method */
+ const uint8_t *method = app->httpReqHeadline;
+ int method_len = 0;
+ for(;; ++method_len ){
+ if( method_len > app->httpReqHeadline_len ) break;
+ if( method[method_len] == ' ' ) break;
+ }
+ /* find http uri */
+ const uint8_t *uri = method + method_len + 1;
+ int uri_len = 0;
+ for(;; ++uri_len ){
+ if( method_len + uri_len > app->httpReqHeadline_len ) break;
+ if( uri[uri_len] == ' ' ) break;
+ }
+ if( !(app->flg & FLG_isHdrPrinted) ){
+ app->flg |= FLG_isHdrPrinted;
+ printf("h;Title;HTTP requests\n");
+ printf("c;epochSec;srcIp;dstIp;srcPort;dstPort;http_method;http_uri\n");
+ }
+ /* print it as a quick-n-dirty CSV record */
+ printf("r;%ld.%06ld;%d.%d.%d.%d;%d.%d.%d.%d;%d;%d;%.*s;%.*s\n",
+ hdr->ts.tv_sec, hdr->ts.tv_usec,
+ app->ipSrcAddr >> 24, app->ipSrcAddr >> 16 & 0xFF, app->ipSrcAddr >> 8 & 0xFF, app->ipSrcAddr & 0xFF,
+ app->ipDstAddr >> 24, app->ipDstAddr >> 16 & 0xFF, app->ipDstAddr >> 8 & 0xFF, app->ipDstAddr & 0xFF,
+ app->trspSrcPort, app->trspDstPort,
+ method_len, method, uri_len, uri);
+ }
+}
+
+
/* Opens the pcap dump named by app->dumpFilePath (or stdin) and pumps all
 * packets through onPcapPkg(). Returns 0 on success, -1 on any error.
 * NOTE(review): pcap_init() exists since libpcap 1.10 -- confirm that is
 * the intended minimum version. */
static int run( PcapOne*app ){
    int err;
    err = pcap_init(PCAP_CHAR_ENC_UTF_8, app->pcapErrbuf);
    if( err == PCAP_ERROR ){
        fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
    /* "-" tells libpcap to read the dump from stdin. Pointer comparison is
     * intentional: DEV_STDIN is a sentinel (see parseArgs). */
    app->pcap = pcap_open_offline(
        (app->dumpFilePath == DEV_STDIN) ? "-" : app->dumpFilePath,
        app->pcapErrbuf);
    if( app->pcap == NULL ){
        fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
    for(;;){
        /* cnt -1: process packets until EOF or error. Returns the count of
         * packets processed in this call. */
        err = pcap_dispatch(app->pcap, -1, onPcapPkg, (void*)app);
        switch( err ){
            case PCAP_ERROR:
                fprintf(stderr, "pcap_dispatch(): %s\n", pcap_geterr(app->pcap));
                err = -1; goto endFn;
            case PCAP_ERROR_BREAK:
            case PCAP_ERROR_NOT_ACTIVATED:
                fprintf(stderr, "pcap_dispatch() -> %d\n", err);
                err = -1; goto endFn;
        }
        if( err > 0 ){
            fprintf(stderr, "Processed %d packages in this turn.\n", err);
            continue;
        }
        break; /* err == 0: end of dump reached */
    }
    err = 0;
endFn:
    if( app->pcap != NULL ){ pcap_close(app->pcap); app->pcap = NULL; }
    return err;
}
+
+
+int main( int argc, char**argv ){
+ int err;
+ static char errbuf[PCAP_ERRBUF_SIZE];
+ errbuf[0] = '\0';
+ PcapOne app = {
+ .flg = FLG_INIT,
+ .pcapErrbuf = errbuf,
+ .pcap = NULL,
+ .frameNr = 0,
+ .trspBodyLen = 0,
+ };
+ #define app (&app)
+
+ err = parseArgs(app, argc, argv);
+ if( err ){ goto endFn; }
+
+ if( app->flg & FLG_isHelp ){
+ printHelp(); goto endFn; }
+
+ err = run(app);
+
+endFn:
+ if( err < 0 ) err = -err;
+ if( err > 0x7F ) err = 1;
+ return err;
+ #undef app
+}
+
+
diff --git a/src/main/c/common/commonKludge.h b/src/main/c/common/commonKludge.h
new file mode 100644
index 0000000..e0f0cba
--- /dev/null
+++ b/src/main/c/common/commonKludge.h
@@ -0,0 +1,16 @@
+
/* Shorthand used throughout the C sources of this repo. */
typedef unsigned char uchar;

/* Two-level stringification so macro ARGUMENTS get expanded before quoting. */
#define STRQUOT_ASDFASDF(s) #s
#define STRQUOT(s) STRQUOT_ASDFASDF(s)
#ifndef PROJECT_VERSION
# define PROJECT_VERSION 0.0.0-SNAPSHOT
#endif

#if __WIN32
  int _setmode(int,int);
/* Switch fd 0 (stdin) and fd 1 (stdout) to binary mode so windows does not
 * mangle line endings. 32768 is presumably _O_BINARY -- TODO confirm. The
 * loop body runs for a==0 and a==1 and stops at a==2 because (2&10) != 0. */
# define FUCK_BROKEN_SYSTEMS() do{char a=0;for(;!(a&10);){_setmode(a++,32768);}}while(0)
#else
/* No-op on platforms whose stdio is not broken. */
# define FUCK_BROKEN_SYSTEMS()
#endif
+
diff --git a/src/main/c/foo/Asn1Digger.c b/src/main/c/foo/Asn1Digger.c
new file mode 100644
index 0000000..15cdc69
--- /dev/null
+++ b/src/main/c/foo/Asn1Digger.c
@@ -0,0 +1,416 @@
+/*
+ * openssl asn1parse -i -dlimit 9999
+ */
+
+#include "commonKludge.h"
+
+/* System */
+#include <assert.h>
+#include <errno.h>
+#include <inttypes.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
/* Bit flags kept in AsnDigger.flg */
#define FLG_isHelp (1<<0)    /* --help requested */
#define FLG_innIsEof (1<<1)  /* input stream (stdin) reached EOF */
#define FLG_assumeGpg (1<<2) /* --gpg: print additional GPG-related hints */
#define FLG_INIT (0)

/* Indirection kept so all reads are easy to trace/instrument (see the
 * commented-out myFread below). */
#define FREAD(buf, sz, cnt, ctx) fread(buf, sz, cnt, ctx)

#ifndef NDEBUG
# define IF_DBG(expr) expr;
#else
# define IF_DBG(expr)
#endif


typedef enum FuncToCall FuncToCall;
typedef struct AsnDigger AsnDigger;


/* States of the parser state machine; run() dispatches on these. */
enum FuncToCall {
    FUNC_NONE=0,                /* debug marker: no next step set */
    FUNC_asnType,               /* expect an ASN.1 type octet next */
    FUNC_asnLength,             /* expect ASN.1 length octet(s) next */
    FUNC_asnValue,              /* print tag info, decide how to consume the value */
    FUNC_hexDump_readChunk,     /* read next chunk of the value into innBuf */
    FUNC_hexDump_dumpFromBuf,   /* hex-dump what readChunk buffered */
};


struct AsnDigger {
    unsigned flg;  /* bitset of the FLG_* values above */
    long hexCols; /* num hex cols wanted by user */
    uchar type; /* ASN.1 type */
    uchar typeFlg; /* ASN.1 flags from the 'type' octet */
    unsigned len; /* ASN.1 length */
    uchar lenFlg; /* flags from the ASN.1 'length' octet */
    int hexDumpOffs; /* how many bytes we're offset into current value in context of printing it */
    int remainValueBytes; /* how many bytes we still need to process from 'value' */
    FuncToCall funcToCall;  /* next state; see enum FuncToCall */
    int typeNameBuf_cap;  /* sizeof typeNameBuf; set once in main() */
    char typeNameBuf[sizeof"subType 0xFF, ObjectIdentifier, constructed"];  /* sized for the longest name produced */
    int asciBuf_cap, asciBuf_len;  /* pending ASCII column of the hex dump */
    char asciBuf[48+1];  /* max 48 hex columns (see parseArgs) plus NUL */
    int opStack_cap, opStack_len;
    uchar opStack[1<<8]; /* operation stack, holding recursion information (eg nesting of sequences) */
    int innBuf_cap, innBuf_len;  /* chunk buffer for value payload read from stdin */
    uchar innBuf[1<<15];
};
+
+
+/* BEG func fwd decls */
+static void constructPrintableTypename( AsnDigger*app );
+/* END func fwd decls */
+
+
+//static size_t myFread( void*restrict buf, size_t sz, size_t cnt, FILE*restrict cls ){
+// size_t ret = fread(buf, sz, cnt, cls);
+// fprintf(stderr, "fread(buf, %llu, %llu, cls) -> %llu\n", sz, cnt, ret);
+// return ret;
+//}
+
+
+static void printHelp(){
+ printf("%s%s%s", " \n"
+ " ", strrchr(__FILE__,'/')+1, " @ " STRQUOT(PROJECT_VERSION) "\n"
+ " \n"
+ " Print ASN.1 from stdin to a textual representation on stdout.\n"
+ " \n"
+ " Options:\n"
+ " \n"
+ " -c <num>\n"
+ " Number of columns to use for hex-dumps. Defaults to 16.\n"
+ " \n"
+ " --gpg\n"
+ " Assume GPG and print additional info for it.\n"
+ " \n");
+}
+
+
+static int parseArgs( int argc, char**argv, AsnDigger*app ){
+ app->flg = FLG_INIT;
+ app->hexCols = 16;
+ for( int iArg=1 ; iArg<argc ; ++iArg ){
+ const char *arg = argv[iArg];
+ if(0){
+ }else if( !strcmp(arg,"--help") ){
+ app->flg |= FLG_isHelp; return 0;
+ }else if( !strcmp(arg,"-c") ){
+ arg = argv[++iArg];
+ if( iArg >= argc ){ fprintf(stderr,"EINVAL: -c needs value\n"); return -1; }
+ errno = 0;
+ app->hexCols = strtol(arg, NULL, 0);
+ if( errno != 0 ){ fprintf(stderr, "EINVAL: -c: %s\n", strerror(errno)); return -1; }
+ }else if( !strcmp(arg, "--gpg") ){
+ app->flg |= FLG_assumeGpg;
+ }else if( !strcmp(arg, "--no-gpg") ){
+ app->flg &= ~FLG_assumeGpg;
+ }else{
+ fprintf(stderr, "EINVAL: '%s'\n", arg); return -1;
+ }
+ }
+ if( app->hexCols <= 0 || app->hexCols > 48 ){
+ fprintf(stderr, "ENOTSUP: %ld hex columns not supported.\n", app->hexCols); return -1;
+ }
+ return 0;
+}
+
+
+//static unsigned char* ucharPtrOfcharPtr( char*c ){ return (void*)c; }
+//static char* charPtrOfucharPtr( unsigned char*c ){ return (void*)c; }
+
+
+static void opStackPush( AsnDigger*app, uchar*buf, int len ){
+ if( app->opStack_len + len >= app->opStack_cap ){
+ fprintf(stderr, "%s: Internal operation stack overflow\n", strrchr(__FILE__,'/')+1);
+ abort();
+ }
+ memcpy(buf, app->opStack + app->opStack_len, len);
+ app->opStack_len += len;
+}
+
+
+static void opStackPop( AsnDigger*app, uchar*retval, int len ){
+ assert(app->opStack_len >= len);
+ app->opStack_len -= len;
+ if( retval != NULL ){
+ memcpy(retval, app->opStack + app->opStack_len, len);
+ }
+}
+
+
+static int asnType( AsnDigger*app ){
+ size_t sz;
+ int err;
+ uchar type[1];
+ sz = FREAD(type, 1, 1, stdin);
+ if( sz != 1 ){
+ err = errno;
+ if( feof(stdin) ){
+ app->flg |= FLG_innIsEof;
+ IF_DBG(app->funcToCall = FUNC_NONE);
+ return 0;
+ }else{
+ fprintf(stderr, "%s STDIN: %s\n", strrchr(__FILE__,'/')+1, strerror(err));
+ return -err;
+ }
+ }
+ app->type = type[0] & 0x1F;
+ app->typeFlg = type[0] & 0xE0;
+ app->funcToCall = FUNC_asnLength;
+ return 0;
+}
+
+
/* Decodes the ASN.1 length octet(s) following the type octet into app->len
 * (and flag bits into app->lenFlg), then advances the state machine to
 * FUNC_asnValue. EOF here is treated as a regular end of input.
 * NOTE(review): BER (X.690 8.1.3) marks the long length form via bit 0x80
 * with 7 value bits per octet; the 0x1F/0xE0 masks below mirror the
 * TYPE-octet layout instead -- confirm this is intended.
 * NOTE(review): the accumulation 'app->len << numBytesRead*7' shifts by a
 * GROWING amount each iteration, re-shifting previously accumulated bits --
 * verify against X.690 8.1.3.5. */
static int asnLength( AsnDigger*app ){
    size_t sz;
    int err;
    int_fast16_t numBytesRead = 0;
    uchar len[1];
    int_fast8_t isLongType;  /* assigned on the 1st iteration, which always runs */

    for(;;){
readNextByte:
        sz = FREAD(len, 1, 1, stdin);
        if( sz != 1 ){
            if( feof(stdin) ){
                app->flg |= FLG_innIsEof;
                IF_DBG(app->funcToCall = FUNC_NONE);
                return 0;
            }else{
                err = errno;
                fprintf(stderr, "%s STDIN: %s\n", strrchr(__FILE__,'/')+1, strerror(err));
                return -err;
            }
        }
        numBytesRead += 1;

        if( numBytesRead == 1 ){
            isLongType = (len[0] & 0x1F) == 0x1F;
            app->len = isLongType ? 0 : (len[0] & 0x1F);
            app->lenFlg = len[0] & 0xE0;
        }
        if( isLongType ){
            /* refuse encodings that would overflow app->len */
            if( numBytesRead > sizeof(app->len)*8/7 ){
                fprintf(stderr, "%s ENOTSUP: Cannot handle tag length encoded in more than %ld bytes\n",
                    strrchr(__FILE__,'/')+1, (long)numBytesRead-1);
                return -ENOTSUP;
            }else{
                app->len = (app->len << numBytesRead*7) | (len[0] & 0x7F);
                const int_fast8_t hasMoreBytes = len[0] & 0x80;  /* continuation bit */
                if( hasMoreBytes ){
                    goto readNextByte;
                }else{
                    goto setupValueFuncThenReturn;
                }
            }
        }else{
            /* short form: single octet, done (re-assigns the same values
             * already stored in the numBytesRead==1 branch above) */
            app->len = len[0] & 0x1F;
            app->lenFlg = len[0] & 0xE0;
            goto setupValueFuncThenReturn;
        }
    }

setupValueFuncThenReturn:
    app->funcToCall = FUNC_asnValue;
    return 0;
}
+
+
/* Prints the summary line for the tag just parsed, then decides how to
 * consume its value: sequences and empty values need no payload handling
 * (a sequence's members follow as regular tags), everything else gets
 * hex-dumped chunk by chunk via the hexDump_* states. */
static int asnValue( AsnDigger*app ){

    constructPrintableTypename(app);
    printf("ASN.1 type 0x%02X, typeFlgs 0x%02X, len %d, lenFlgs 0x%02X (%s)%s",
        app->type, app->typeFlg, app->len, app->lenFlg, app->typeNameBuf, app->len?", value:":"");

    const int isSequence = (app->type == 0x10);  /* universal tag 16: SEQUENCE */
    if( app->len == 0 || isSequence ){
        /* no payload. Ready to go to next tag. */
        printf("\n");
        app->funcToCall = FUNC_asnType;
        return 0;
    }

    /* go process payload */
    app->remainValueBytes = app->len;
    app->hexDumpOffs = 0;
    app->asciBuf_len = 0;
    app->funcToCall = FUNC_hexDump_readChunk;
    return 0;
}
+
+
+static int hexDump_readChunk( AsnDigger*app ){
+ #define MIN(a, b) ((a) < (b) ? (a) : (b))
+ #define IS_PRINTABLE(c) (c >= 0x20 && c <= 0x7E)
+ int_fast32_t err;
+
+ size_t readLen = MIN(app->remainValueBytes, app->innBuf_cap);
+ err = readLen % app->hexCols;
+ if( err != 0 && readLen < err ){ /*align buffer to make printing of hexDump easier*/
+ readLen -= err; }
+ assert(readLen > 0);
+ readLen = FREAD(app->innBuf, 1, readLen, stdin);
+ if( readLen == 0 ){
+ err = errno;
+ if( feof(stdin) ){
+ app->flg |= FLG_innIsEof;
+ IF_DBG(app->funcToCall = FUNC_NONE);
+ if( app->remainValueBytes > 0 ){
+ fprintf(stderr, "%s STDIN: Unexpected EOF\n", strrchr(__FILE__,'/')+1);
+ return -1;
+ }
+ return 0;
+ }else{
+ fprintf(stderr, "%s STDIN: %s\n", strrchr(__FILE__,'/')+1, strerror(err));
+ return -1;
+ }
+ }
+ assert(app->remainValueBytes >= readLen);
+ app->innBuf_len = readLen;
+ app->remainValueBytes -= readLen;
+ app->funcToCall = FUNC_hexDump_dumpFromBuf;
+ return 0;
+}
+
+
/* Pretty-prints the chunk buffered in innBuf as a classic hex dump (hex
 * columns left, ASCII column right), continuing the dump of the current
 * value across chunks via the hexDumpOffs/asciBuf state. */
static int hexDump_dumpFromBuf( AsnDigger*app ){

    int_fast32_t err;
    /* first chunk of this value iff nothing was consumed before it */
    const int_fast8_t isFirstRun = app->remainValueBytes == (app->len - app->innBuf_len);

    if( (app->flg & FLG_assumeGpg) && isFirstRun ){
        /* NOTE(review): asnType() masks app->type with 0x1F, so it can never
         * equal 0x95 or 0x99 -- these branches look unreachable. Possibly
         * the UN-masked type octet was meant here; confirm. */
        if( app->type == 0x95 ){
            printf("\nGPG secret key packet, version %d", app->innBuf[1]);
        }else if( app->type == 0x99 && app->len == 1 && app->innBuf[0] == 0x0D ){
            puts("\nGPG certificate");
        }
    }

    /* print hex column */
    int iChr;
    for( iChr=0 ; iChr < app->innBuf_len ; ++iChr,++app->hexDumpOffs ){
        if( app->hexDumpOffs % app->hexCols == 0 ){
            /* start next hexDump line: flush pending ASCII, print offset */
            printf(" %.*s\n %08X:", app->asciBuf_len, app->asciBuf, app->hexDumpOffs);
            app->asciBuf_len = 0;
        }
        printf(" %02X", app->innBuf[iChr]);
        /* cache ASCI part (right column of hex-dump) to write it later at EOL */
        app->asciBuf[app->asciBuf_len++] = IS_PRINTABLE(app->innBuf[iChr]) ? app->innBuf[iChr] : '.';
    }

    /* print asci column */
    if( app->asciBuf_len > 0 ){
        err = iChr % app->hexCols;
        for(; err < app->hexCols ; ++err ){ printf(" "); } /* fill hexDump space on last line */
        printf(" %.*s\n", app->asciBuf_len, app->asciBuf); /* print asci chars */
        app->asciBuf_len = 0;
    }

    if( app->remainValueBytes == 0 ){
        app->funcToCall = FUNC_asnType; /* done here. Go read next asn tag. */
    }else{
        app->funcToCall = FUNC_hexDump_readChunk; /* there's more data for current tag */
    }

    return 0;
    #undef MIN
    #undef IS_PRINTABLE
}
+
+
+static void constructPrintableTypename( AsnDigger*app ){
+ int err;
+ switch( app->type ){
+ case 0x00: memcpy(app->typeNameBuf, "EndOfContent", 13); break;
+ case 0x02: memcpy(app->typeNameBuf, "Integer", 8); break;
+ case 0x04: memcpy(app->typeNameBuf, "OctetString", 12); break;
+ case 0x05: memcpy(app->typeNameBuf, "null", 5); break;
+ case 0x06: memcpy(app->typeNameBuf, "ObjectIdentifier", 17); break;
+ case 0x08: memcpy(app->typeNameBuf, "External", 9); break;
+ case 0x0C: memcpy(app->typeNameBuf, "Utf8String", 11); break;
+ case 0x0F: memcpy(app->typeNameBuf, "Reserved!!", 11); break;
+ case 0x10: memcpy(app->typeNameBuf, "Sequence", 9); break;
+ case 0x12: memcpy(app->typeNameBuf, "NumericString", 14); break;
+ case 0x13: memcpy(app->typeNameBuf, "PrintableString", 16); break;
+ case 0x18: memcpy(app->typeNameBuf, "GeneralizedTime", 16); break;
+ case 0x1C: memcpy(app->typeNameBuf, "UniversalString", 16); break;
+ case 0x1D: memcpy(app->typeNameBuf, "CharacterString", 16); break;
+ case 0x23: memcpy(app->typeNameBuf, "OID-IRI", 8); break;
+ default:;
+ /* construct some generified name with help of passed buffer */
+ const char *tagClass;
+ if( (app->typeFlg & 0xC0) == 0 ){ tagClass = "Universal"; }
+ else if( (app->typeFlg & 0x40) == 0 ){ tagClass = "Application"; }
+ else if( (app->typeFlg & 0x80) == 0 ){ tagClass = "ContextSpecific"; }
+ else{ tagClass = "Private"; }
+ const char *primOrConstr = (app->typeFlg & 0x20) ? "constructed" : "primitive";
+ int isLongType = app->type == 0x1F;
+ if( isLongType ){
+ err = snprintf(app->typeNameBuf, app->typeNameBuf_cap, "LongType, %s, %s",
+ tagClass, primOrConstr);
+ assert(err < app->typeNameBuf_cap);
+ }else{
+ err = snprintf(app->typeNameBuf, app->typeNameBuf_cap, "subType 0x%02X, %s, %s",
+ app->type, tagClass, primOrConstr);
+ assert(err < app->typeNameBuf_cap);
+ }
+ }
+}
+
+
/* Identity function; exists only as a convenient spot to set a debugger
 * breakpoint right before the state-machine dispatch in run(). */
static inline int breakBeforeDispatch( int funcId ){
    return funcId;
}
+
+
+static int run( AsnDigger*app ){
+ int err;
+ app->funcToCall = FUNC_asnType;
+ while( (app->flg & FLG_innIsEof) == 0 ){
+ switch( breakBeforeDispatch(app->funcToCall) ){
+ case FUNC_asnType: err = asnType(app); break;
+ case FUNC_asnLength: err = asnLength(app); break;
+ case FUNC_asnValue: err = asnValue(app); break;
+ case FUNC_hexDump_readChunk: err = hexDump_readChunk(app); break;
+ case FUNC_hexDump_dumpFromBuf: err = hexDump_dumpFromBuf(app); break;
+ default:
+ IF_DBG(fprintf(stderr,"Whops %d %s:%d\n", app->funcToCall, __FILE__, __LINE__));
+ abort();
+ }
+ if( err != 0 ){ return err; }
+ }
+ return 0;
+}
+
+
+int main( int argc, char**argv ){
+ int err;
+ AsnDigger app;
+ #define app (&app)
+ app->typeNameBuf_cap = sizeof app->typeNameBuf;
+ app->asciBuf_cap = sizeof app->asciBuf;
+ app->asciBuf_len = 0;
+ app->opStack_cap = sizeof app->opStack;
+ app->opStack_len = 0;
+ app->innBuf_cap = sizeof app->innBuf;
+ app->innBuf_len = 0;
+
+ if( (err=parseArgs(argc, argv, app)) != 0 ){ goto endFn; }
+
+ if( app->flg & FLG_isHelp ){ printHelp(app); err = 0; goto endFn; }
+
+ FUCK_BROKEN_SYSTEMS();
+
+ err = run(app);
+
+endFn:
+ return !!err;
+ #undef app
+}
+
+
+
diff --git a/src/main/c/foo/PemCodec.c b/src/main/c/foo/PemCodec.c
new file mode 100644
index 0000000..d6bc4ab
--- /dev/null
+++ b/src/main/c/foo/PemCodec.c
@@ -0,0 +1,218 @@
+
+#include "commonKludge.h"
+
+/* System */
+#include <assert.h>
+#include <errno.h>
+#include <inttypes.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
/* Capacity of the single scratch/IO buffer inside PemCodec. */
#define BUF_CAP (1<<15)

typedef struct PemCodec PemCodec;
/* NOTE(review): 'ulong' as unsigned long long is unusual (and may clash
 * with the 'ulong' some platforms declare in sys/types.h) -- it only feeds
 * the (ulong) casts for the %llu diagnostics below; confirm. */
typedef unsigned long long ulong;


/* Direction of operation, selected on the cmdline. */
enum Mode {
    MODE_NONE = 0,
    MODE_ENCODE,  /* default; rejected as not-yet-implemented in main() */
    MODE_DECODE,  /* -d */
};


struct PemCodec {
    int isHelp; /* TODO flg */
    enum Mode mode; /* one of "ENCODE" or "DECODE" */
    int buf_len;  /* NOTE(review): never written in this file -- confirm needed */
    unsigned char buf[BUF_CAP];  /* scratch buffer for stdin reads */
};
+
+
+static void printHelp(){
+ printf("%s%s%s", " \n"
+ " ", strrchr(__FILE__,'/')+1, " @ " STRQUOT(PROJECT_VERSION) "\n"
+ " \n"
+ " encode/decode PEM (PrivacyEnhancedMail) from stdin to stdout.\n"
+ " \n"
+ " HINT: Encode is not yet implemented.\n"
+ " \n"
+ " Options:\n"
+ " \n"
+ " -d decode\n"
+ " \n");
+}
+
+
+static int parseArgs( int argc, char**argv, PemCodec*app ){
+ app->isHelp = 0;
+ app->mode = MODE_ENCODE;
+ if( argc == 1 ){ fprintf(stderr, "EINVAL: Args missing\n"); return -1; }
+ for( int iArg=1 ; iArg<argc ; ++iArg ){
+ const char *arg = argv[iArg];
+ if(0){
+ }else if( !strcmp(arg,"--help") ){
+ app->isHelp = !0; return 0;
+ }else if( !strcmp(arg,"-d") ){
+ app->mode = MODE_DECODE;
+ }else{
+ fprintf(stderr, "EINVAL: '%s'\n", arg); return -1;
+ }
+ }
+ return 0;
+}
+
+
+static int decodeB64UpToDash( PemCodec*app ){
+ int err;
+ size_t sz;
+ int iByte;
+ int sextets[4];
+
+readFourInputOctets:
+ iByte = 0;
+
+readNextInputOctet:
+ sz = fread(app->buf, 1, 1, stdin);
+ if( sz != 1 ){ assert(!"TODO_20230825155237"); }
+
+ /* TODO may use switch-case instead */
+ if(0){
+ }else if( app->buf[0] >= 'A' && app->buf[0] <= 'Z' ){
+ sextets[iByte] = app->buf[0] - 65;
+ }else if( app->buf[0] >= 'a' && app->buf[0] <= 'z' ){
+ sextets[iByte] = app->buf[0] - 71;
+ }else if( app->buf[0] >= '0' && app->buf[0] <= '9' ){
+ sextets[iByte] = app->buf[0] + 4;
+ }else if( app->buf[0] == '+' ){
+ sextets[iByte] = 63;
+ }else if( app->buf[0] == '/' ){
+ sextets[iByte] = 64;
+ }else if( app->buf[0] == '=' ){
+ sextets[iByte] = 0;
+ }else if( app->buf[0] == '\n' ){ /* ignore newlines */
+ goto readNextInputOctet;
+ }else if( app->buf[0] == '-' ){ /* EndOf b64 data */
+ goto endFn;
+ }else{
+ fprintf(stderr, "Unexpected octet 0x%02X in b64 stream\n", app->buf[0]);
+ err = -1; goto endFn;
+ }
+
+ if( ++iByte < 4 ) goto readNextInputOctet; /* aka loop */
+
+ /* output as the three original binary octets */
+ err = printf("%c%c%c",
+ ( sextets[0] << 2) | (sextets[1] >> 4) ,
+ ((sextets[1] & 0xF) << 4) | (sextets[2] >> 2) ,
+ ((sextets[2] & 0x3) << 6) | sextets[3]
+ );
+ if( err < 0 ){
+ err = errno;
+ fprintf(stderr, "printf: %s\n", strerror(errno));
+ err = -errno; goto endFn;
+ }
+
+ goto readFourInputOctets; /* aka loop */
+
+ err = 0;
+endFn:
+ return err;
+}
+
+
/* Decodes one PEM document from stdin to raw binary on stdout: validates
 * the "-----BEGIN ...-----" header line, base64-decodes the body up to the
 * trailer, sanity-checks the trailer, then drains remaining input.
 * Returns 0 on success, -1 on malformed input or I/O error. */
static int decodePem( PemCodec*app ){
    int err;
    size_t sz;

    sz = fread(app->buf, 11, 1, stdin);
    if( sz != 1 ){
        /* NOTE(review): errno is not set on plain EOF, so the strerror()
         * text can be misleading in the EOF case -- confirm acceptable. */
        const char *fmt = feof(stdin)
            ? "Unexpected EOF while reading PEM header: %s\n"
            : "Cannot read PEM header: %s\n";
        fprintf(stderr, fmt, strerror(errno));
        err = -1; goto endFn;
    }

    if( memcmp(app->buf, "-----BEGIN ", 11) ){
        fprintf(stderr, "EINVAL: No valid PEM header found\n");
        err = -1; goto endFn;
    }

    /* read until EOL, requiring the header line to end in five dashes */
    int numDashesInSequence = 0;
    for(;;){
        sz = fread(app->buf, 1, 1, stdin);
        if( sz != 1 ){
            const char *fmt = feof(stdin)
                ? "Unexpected EOF while reading PEM header: %s\n"
                : "Cannot read PEM header: %s\n";
            fprintf(stderr, fmt, strerror(errno));
            err = -1; goto endFn;
        }
        if( app->buf[0] == '\n' ){
            if( numDashesInSequence != 5 ){
                fprintf(stderr, "EINVAL: No valid PEM header found\n");
                err = -1; goto endFn;
            }
            break;
        }
        if( app->buf[0] == '-' ){
            numDashesInSequence += 1;
        }else{
            numDashesInSequence = 0;
        }
    }

    if( (err=decodeB64UpToDash(app)) < 0 ){ goto endFn; }

    /* readEndOfPemLine. 1st dash got already consumed in func above */
    sz = fread(app->buf, 8, 1, stdin);
    if( sz != 1 || memcmp(app->buf, "----END ", 8)){
        /* diagnostic only in debug builds; assert never fires since fprintf
         * returns a positive count here */
        assert(fprintf(stderr, "sz=%llu\n", (ulong)sz));
        goto warnAndDrain;
    }
    /* assume rest of trailer is ok */
    goto drain;

warnAndDrain:
    fprintf(stderr, "WARN: PEM trailer broken\n");

drain:
    /* consume any remaining input.
     * NOTE(review): with nmemb 1, a trailing PARTIAL chunk (< BUF_CAP bytes)
     * makes fread return 0 and ends the loop while still consuming the
     * bytes -- relies on EOF following immediately after; confirm. */
    sz = fread(app->buf, BUF_CAP, 1, stdin);
    if( sz > 0 ) goto drain;
    if( ferror(stdin) ){
        fprintf(stderr, "fread: %s\n", strerror(errno));
        err = -1; goto endFn;
    }

    err = 0;
endFn:
    return err;
}
+
+
+int main( int argc, char**argv ){
+ int err;
+ PemCodec app;
+ #define app (&app)
+
+ if( (err=parseArgs(argc, argv, app)) != 0 ){ goto endFn; }
+
+ if( app->isHelp ){ printHelp(app); err = 0; goto endFn; }
+
+ if( app->mode == MODE_ENCODE ){
+ fprintf(stderr, "ENOTSUP: PEM Encode not implented yet\n");
+ err = -1; goto endFn;
+ }else{
+ assert(app->mode == MODE_DECODE);
+ err = decodePem(app);
+ goto endFn;
+ }
+
+endFn:
+ return !!err;
+ #undef app
+}
+
diff --git a/src/main/firefox/gaga-plugin/main.js b/src/main/firefox/gaga-plugin/main.js
new file mode 100644
index 0000000..2a5bbae
--- /dev/null
+++ b/src/main/firefox/gaga-plugin/main.js
@@ -0,0 +1,197 @@
+/*
+ * For how to install see:
+ *
+ * "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/tree/doc/note/firefox/firefox.txt"
+ */
+;(function(){ try{
+
+ var NDEBUG = false;
+ var STATUS_INIT = 1;
+ var STATUS_RUNNING = 2;
+ var STATUS_DONE = 3;
+ var STATUS_OBSOLETE = 4;
+ var NOOP = function(){};
+ var LOGERR = console.error.bind(console);
+ var N = null;
+ var setTimeout, logErrors, LOGDBG;
+
+
+ function main(){
+ var app = Object.seal({
+ ui: {},
+ status: Object.seal({
+ checklistBtn: STATUS_INIT,
+ developmentBtn: STATUS_INIT,
+ }),
+ lastClickEpochMs: 0,
+ });
+ if( NDEBUG ){
+ setTimeout = window.setTimeout;
+ logErrors = function(fn){ fn.apply(N, Array.prototype.slice.call(arguments, 1)); }
+ LOGDBG = NOOP;
+ }else{ /* fix broken tooling */
+ setTimeout = setTimeoutWithCatch.bind(0, app);
+ logErrors = logErrorsImpl.bind(N, app);
+ LOGDBG = console.debug.bind(console);
+ }
+ document.addEventListener("DOMContentLoaded", logErrors.bind(N, onDOMContentLoaded, app));
+ }
+
+
+ function onDOMContentLoaded( app ){
+ cleanupClutter(app);
+ attachDomObserver(app);
+ }
+
+
+ function attachDomObserver( app ){
+ new MutationObserver(onDomHasChangedSomehow.bind(N, app))
+ .observe(document, { subtree:true, childList:true, attributes:true });
+ }
+
+
+ function onDomHasChangedSomehow( app, changes, mutationObserver ){
+ var nowEpochMs = Date.now();
+ if( (app.lastClickEpochMs + 2000) > nowEpochMs ){
+ LOGDBG("ignore, likely triggered by user.");
+ return; }
+ var needsReEval = false;
+ for( var change of changes ){
+ if( change.target.nodeName != "BUTTON" ) continue;
+ var isAriaExpanded = (change.attributeName == "aria-expanded");
+ var isChildAdded = (change.addedNodes.length > 0);
+ var isChildRemoved = (change.removedNodes.length > 0);
+ var isChildAddedOrRemoved = isChildAdded || isChildRemoved;
+ if( !isAriaExpanded && !isChildAddedOrRemoved ) continue;
+ if( isAriaExpanded ){
+ LOGDBG("Suspicious, isExpanded: ", change.target);
+ needsReEval = true; break;
+ }
+ if( !isChildAddedOrRemoved ) continue;
+ var isBloatyChecklistBtnStillThere = document.body.contains(getBloatyChecklistBtn(app));
+ if( !isBloatyChecklistBtnStillThere ){
+ LOGDBG("Suspicious, btn lost");
+ needsReEval = true; break;
+ }
+ var isBloatyDevelopmentBtnStillThere = document.body.contains(getBloatyDevelopmentBtn(app));
+ if( !isBloatyDevelopmentBtnStillThere ){
+ LOGDBG("Suspicious, btn lost");
+ needsReEval = true; break;
+ }
+ }
+ if( needsReEval ){
+ LOGDBG("Change detected! Eval again");
+ app.ui.bloatyChecklistBtn = null;
+ app.ui.bloatyDevelopmentBtn = null;
+ setTimeout(cleanupClutter, 42, app);
+ }
+ }
+
+
+ function cleanupClutter( app ){
+ if( app.bloatyChecklistDone != STATUS_RUNNING ){
+ app.bloatyChecklistDone = STATUS_OBSOLETE
+ setTimeout(hideBloatyButton, 0, app, "checklistBtn");
+ }
+ if( app.bloatyDevelopmentDone != STATUS_RUNNING ){
+ app.bloatyDevelopmentDone = STATUS_OBSOLETE;
+ setTimeout(hideBloatyButton, 0, app, "developmentBtn");
+ }
+ if( app.bloatyBigTemplateDone != STATUS_RUNNING ){
+ app.bloatyBigTemplateDone = STATUS_OBSOLETE;
+ setTimeout(hideBloatyButton, 0, app, "bigTemplateBtn");
+ }
+ }
+
+
+ function setLastClickTimeToNow( app ){ app.lastClickEpochMs = Date.now(); }
+
+
+ function hideBloatyButton( app, btnKey ){
+ if( app.status[btnKey] == STATUS_DONE ){
+ LOGDBG(btnKey +" now hidden");
+ return; }
+ app.status[btnKey] = STATUS_RUNNING;
+ var btn = getBloatyButton(app, btnKey);
+ do{
+ if( !btn ){ LOGDBG(btnKey +" not found. DOM maybe not yet ready?"); break; }
+ var isExpanded = isAriaBtnExpanded(app, btn);
+ if( isExpanded === true ){
+ LOGDBG(btnKey +".click()");
+ btn.click();
+ }else if( isExpanded === false ){
+ app.status[btnKey] = STATUS_DONE;
+ }else{
+ throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded);
+ }
+ }while(0);
+ /* try later */
+ setTimeout(hideBloatyButton, 16, app, btnKey);
+ }
+
+
+ function getBloatyButton( app, btnKey ){
+ if(0){
+ }else if( btnKey == "checklistBtn" ){
+ var selector = "button[aria-label=Checklists]";
+ var uiKey = "bloatyChecklistBtn";
+ }else if( btnKey == "developmentBtn" ){
+ var selector = "button[aria-label=Development]";
+ var uiKey = "bloatyDevelopmentBtn";
+ }else if( btnKey == "bigTemplateBtn" ){
+ var selector = "button[aria-label=BigTemplate]";
+ var uiKey = "bloatyBigTemplateBtn";
+ }else{
+ throw Error(btnKey);
+ }
+ if( !app.ui[uiKey] ){
+ var btn = fetchUiRefOrNull(app, document, selector);
+ if( btn ){
+ btn.addEventListener("mousedown", logErrors.bind(N, setLastClickTimeToNow, app));
+ app.ui[uiKey] = btn;
+ }
+ }
+ return app.ui[uiKey];
+ }
+
+
+ function isAriaBtnExpanded( app, btnElem ){
+ var value = btnElem.getAttribute("aria-expanded");
+ if( value === "true" ){
+ return true;
+ }else if( value === "false" ){
+ return false;
+ }else{
+ throw Error("btn[aria-expand] is '"+ value +"'");
+ }
+ }
+
+
+ function fetchUiRefOrNull( app, searchRoot, query ){
+ var elems = searchRoot.querySelectorAll(query);
+ if( elems.length > 1 ){ throw Error("Not unique: "+ query); }
+ if( elems.length !== 1 ){ return null; }
+ return elems[0];
+ }
+
+
+ function setTimeoutWithCatch( app, func, ms, a1, a2, a3, a4, a5, a6 ){
+ if( typeof(app) != "object" ){ LOGERR("E_20230718192813 ", app); return; }
+ if( typeof(func) != "function" ){ LOGERR("E_20230718192821", func); return; }
+ if( typeof(ms) != "number" ){ LOGERR("E_20230718192830", ms); return; }
+ window.setTimeout(logErrors, ms, func, a1, a2, a3, a4, a5, a6);
+ }
+
+
+ function logErrorsImpl( app, func, a1, a2, a3, a4, a5, a6, a7, a8, a9 ){
+ try{
+ func(a1, a2, a3, a4, a5, a6, a7, a8, a9);
+ }catch( ex ){
+ LOGERR(ex);
+ }
+ }
+
+
+ main();
+
+}catch(ex){console.error(ex); throw ex;}}());
diff --git a/src/main/firefox/gaga-plugin/manifest.json b/src/main/firefox/gaga-plugin/manifest.json
new file mode 100644
index 0000000..645dbda
--- /dev/null
+++ b/src/main/firefox/gaga-plugin/manifest.json
@@ -0,0 +1,18 @@
+{
+ "manifest_version": 2,
+ "name": "Gaga Plugin",
+ "version": "0.0.0",
+ "description": "Just a nonsense plugin to test out how stuff works.",
+ "browser_specific_settings": {
+ "gecko": {
+ "id": "971fdae123170c1f2e46f933a2z4hgiuhrlc7d0571620c0cc530e4497137773fd810@example.com"
+ }
+ },
+ "content_scripts": [
+ {
+ "matches": [ "https://jira.post.ch/*" ],
+ "run_at": "document_start",
+ "js": [ "main.js" ]
+ }
+ ]
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/json/JsonUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/json/JsonUtils.java
index f860b85..1103e65 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/json/JsonUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/json/JsonUtils.java
@@ -1,7 +1,5 @@
package ch.hiddenalpha.unspecifiedgarbage.json;
-import java.io.IOException;
-import java.util.LinkedHashMap;
import java.util.Map;
@@ -24,10 +22,10 @@ public class JsonUtils {
*/
public static
<TreeToValueFunc extends ObjectCodecIface<JsonNode, Map>, JsonNode, K, V>
- Map<K, V> decodeMap( TreeToValueFunc treeToValueFunc, JsonNode mapNode, Class<K> keyType, Class<V> valueType ) throws IOException {
+ Map<K, V> decodeMap( TreeToValueFunc treeToValueFunc, JsonNode mapNode, Class<K> keyType, Class<V> valueType ) throws java.io.IOException {
final Map<K, V> envVars;
if( mapNode == null ){
- envVars = new LinkedHashMap<>();
+ envVars = new java.util.LinkedHashMap<>();
}else{
envVars = treeToValueFunc.treeToValue(mapNode, Map.class);
}
@@ -35,7 +33,7 @@ public class JsonUtils {
}
public static interface ObjectCodecIface<TreeNode, Value> {
- Value treeToValue( TreeNode input, Class<Map> returnType ) throws IOException;
+ Value treeToValue( TreeNode input, Class<Map> returnType ) throws java.io.IOException;
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteChunkOStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteChunkOStream.java
index 49959fc..e958566 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteChunkOStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteChunkOStream.java
@@ -1,14 +1,10 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Arrays;
-
/**
* Converts an octet-stream to a push-source of byte-arrays.
*/
-public class ByteChunkOStream extends OutputStream {
+public class ByteChunkOStream extends java.io.OutputStream {
private final int chunkSize; // Hint of how large our produced chunks should be.
private byte[] buf;
@@ -26,14 +22,14 @@ public class ByteChunkOStream extends OutputStream {
}
@Override
- public void write( byte[] b, int off, int len ) throws IOException {
+ public void write( byte[] b, int off, int len ) throws java.io.IOException {
int remainingBytes = len;
while( true ){
int appendedBytes = appendToBuffer(b, off, len);
remainingBytes -= appendedBytes;
if( remainingBytes > 0 ){
publishBuffer();
- // Adjust pointers then loop and continue write remainder.
+ // Adjust cursors then loop and continue write remainder.
off += appendedBytes;
len -= appendedBytes;
}else if( remainingBytes == 0 ){
@@ -45,7 +41,7 @@ public class ByteChunkOStream extends OutputStream {
}
@Override
- public void write( int b ) throws IOException {
+ public void write( int b ) throws java.io.IOException {
while( true ){
if( appendToBuffer(b) == 0 ){
publishBuffer();
@@ -57,12 +53,12 @@ public class ByteChunkOStream extends OutputStream {
}
@Override
- public void flush() throws IOException {
+ public void flush() throws java.io.IOException {
publishBuffer();
}
@Override
- public void close() throws IOException {
+ public void close() throws java.io.IOException {
flush();
buf = null; // Think for GC
EndHandler tmp = onEnd;
@@ -95,7 +91,7 @@ public class ByteChunkOStream extends OutputStream {
}
}
- private void publishBuffer() throws IOException {
+ private void publishBuffer() throws java.io.IOException {
if( bufUsedBytes == 0 ){
return; // Nothing to do.
}
@@ -108,7 +104,7 @@ public class ByteChunkOStream extends OutputStream {
// Buffer is not completely full. So we have to make a copy so
// buf.length does report the correct value to callee. That implies
// that we can continue using our existing buffer for ourself.
- bufToPublish = Arrays.copyOfRange(this.buf, 0, bufUsedBytes);
+ bufToPublish = java.util.Arrays.copyOfRange(this.buf, 0, bufUsedBytes);
}
bufUsedBytes = 0; // Our internal buffer is now empty.
onChunk.accept(bufToPublish);
@@ -117,13 +113,13 @@ public class ByteChunkOStream extends OutputStream {
/** Inspired by {@link java.util.function.Consumer} */
public static interface ChunkHandler {
- void accept( byte[] bytes ) throws IOException;
+ void accept( byte[] bytes ) throws java.io.IOException;
}
/** Inspired by {@link java.util.function.Runnable#run()} */
public static interface EndHandler {
- void run() throws IOException;
+ void run() throws java.io.IOException;
}
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountInputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountInputStream.java
index fe4c763..d7d6b5f 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountInputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountInputStream.java
@@ -1,22 +1,19 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.IOException;
-import java.io.InputStream;
-
/**
- * {@link InputStream} decorator to count bytes flowed through the stream.
+ * {@link java.io.InputStream} decorator to count bytes flowed through the stream.
*
*/
-public class ByteCountInputStream extends InputStream {
+public class ByteCountInputStream extends java.io.InputStream {
// TODO: mark/reset should be simple to implement.
- private final InputStream origin;
+ private final java.io.InputStream origin;
private long numBytes = 0;
- public ByteCountInputStream( InputStream origin ){
+ public ByteCountInputStream( java.io.InputStream origin ){
this.origin = origin;
}
@@ -26,7 +23,7 @@ public class ByteCountInputStream extends InputStream {
}
@Override
- public int read() throws IOException {
+ public int read() throws java.io.IOException {
int b = origin.read();
if( b >= 0 ){
numBytes += 1; }
@@ -34,7 +31,7 @@ public class ByteCountInputStream extends InputStream {
}
@Override
- public int read( byte[] b, int off, int len ) throws IOException {
+ public int read( byte[] b, int off, int len ) throws java.io.IOException {
int readLen = origin.read(b, off, len);
if( readLen > 0 ){
numBytes += readLen;
@@ -43,12 +40,12 @@ public class ByteCountInputStream extends InputStream {
}
@Override
- public int available() throws IOException {
+ public int available() throws java.io.IOException {
return origin.available();
}
@Override
- public void close() throws IOException {
+ public void close() throws java.io.IOException {
origin.close();
}
@@ -58,7 +55,7 @@ public class ByteCountInputStream extends InputStream {
//}
//@Override
- //public synchronized void reset() throws IOException {
+ //public synchronized void reset() throws java.io.IOException {
// origin.reset();
//}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountOutputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountOutputStream.java
index 8e2b47f..61fbfdf 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountOutputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ByteCountOutputStream.java
@@ -1,17 +1,15 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.IOException;
-import java.io.OutputStream;
-
/**
- * {@link OutputStream} decorator to count bytes flowed through the stream.
+ * {@link java.io.OutputStream} decorator to count bytes flowed through the stream.
*/
-public class ByteCountOutputStream extends OutputStream {
- private final OutputStream origin;
+public class ByteCountOutputStream extends java.io.OutputStream {
+
+ private final java.io.OutputStream origin;
private long numBytes = 0;
- public ByteCountOutputStream( OutputStream origin ){
+ public ByteCountOutputStream( java.io.OutputStream origin ){
this.origin = origin;
}
@@ -19,21 +17,21 @@ public class ByteCountOutputStream extends OutputStream {
public long getByteCount() { return numBytes; }
@Override
- public void write( int b ) throws IOException {
+ public void write( int b ) throws java.io.IOException {
numBytes += 1;
origin.write(b);
}
@Override
- public void write( byte[] b, int off, int len ) throws IOException {
+ public void write( byte[] b, int off, int len ) throws java.io.IOException {
numBytes += len;
origin.write(b, off, len);
}
@Override
- public void flush() throws IOException { origin.flush(); }
+ public void flush() throws java.io.IOException { origin.flush(); }
@Override
- public void close() throws IOException { origin.close(); }
+ public void close() throws java.io.IOException { origin.close(); }
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CRLFtoLFOutputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CRLFtoLFOutputStream.java
index 0c242aa..d88e1fe 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CRLFtoLFOutputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CRLFtoLFOutputStream.java
@@ -1,25 +1,18 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.FilterOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-import org.slf4j.ILoggerFactory;
-import org.slf4j.Logger;
-
/** Filters away broken newlines. */
-public class CRLFtoLFOutputStream extends FilterOutputStream {
+public class CRLFtoLFOutputStream extends java.io.FilterOutputStream {
private static final int EMPTY = -42;
- private final Logger log;
+ private final org.slf4j.Logger log;
private int previous = EMPTY;
/**
* @param dst
* Destination where the result will be written to.
*/
- public CRLFtoLFOutputStream( OutputStream dst ) {
+ public CRLFtoLFOutputStream( java.io.OutputStream dst ) {
this(dst, null);
}
@@ -27,13 +20,13 @@ public class CRLFtoLFOutputStream extends FilterOutputStream {
* @param dst
* Destination where the result will be written to.
*/
- public CRLFtoLFOutputStream( OutputStream dst, ILoggerFactory lf ) {
+ public CRLFtoLFOutputStream( java.io.OutputStream dst, org.slf4j.ILoggerFactory lf ) {
super(dst);
this.log = (lf == null) ? null : lf.getLogger(CRLFtoLFOutputStream.class.getName());
}
@Override
- public void write( int current ) throws IOException {
+ public void write( int current ) throws java.io.IOException {
// We're allowed to ignore the three high octets (See doc of "OutputStream#write").
// This allows us to assign special meanings to those values internally (eg our
// 'EMPTY' value). For this to work, we clear the high bits to not get confused
@@ -56,14 +49,14 @@ public class CRLFtoLFOutputStream extends FilterOutputStream {
// TODO we should override this.
//@Override
- //public void write( byte[] buf, int off, int len ) throws IOException {
+ //public void write( byte[] buf, int off, int len ) throws java.io.IOException {
// throw new UnsupportedOperationException("TODO impl");/*TODO*/
//}
@Override
- public void flush() throws IOException {
- if( previous == '\r' ){
- log.debug("Have to flush a CR byte without knowing if the next byte might be a LF");
+ public void flush() throws java.io.IOException {
+ if( previous == '\r' && log != null ){
+ log.debug("Have to flush a 0x0D byte (CR) without knowing if the next byte might be a 0x0A (LF)");
}
if( previous != EMPTY ){
int tmp = previous;
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyInputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyInputStream.java
index 34abe55..47db54e 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyInputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyInputStream.java
@@ -5,8 +5,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicBoolean;
-import static java.util.Objects.requireNonNull;
-
/** Allows to get notified when stream gets closed. */
public class CloseNotifyInputStream extends FilterInputStream {
@@ -16,7 +14,8 @@ public class CloseNotifyInputStream extends FilterInputStream {
public CloseNotifyInputStream( InputStream src, Runnable onClose ){
super(src);
- this.onClose = requireNonNull(onClose);
+ assert onClose != null : "onClose expected to exist";
+ this.onClose = onClose;
}
@Override
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyOutputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyOutputStream.java
index a8d6eea..993207d 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyOutputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/CloseNotifyOutputStream.java
@@ -1,30 +1,26 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.FilterOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicBoolean;
-import static java.util.Objects.requireNonNull;
-
/**
* Gives the chance to place a hook to get notified as soon the stream gets
* closed.
*/
-public class CloseNotifyOutputStream extends FilterOutputStream {
+public class CloseNotifyOutputStream extends java.io.FilterOutputStream {
private final Runnable onClose;
private final AtomicBoolean isFired = new AtomicBoolean(false);
- public CloseNotifyOutputStream( OutputStream out, Runnable onClose ){
+ public CloseNotifyOutputStream( java.io.OutputStream out, Runnable onClose ){
super(out);
if( true ) throw new UnsupportedOperationException("TODO need to delegate close call");/*TODO*/
- this.onClose = requireNonNull(onClose);
+ assert onClose != null : "Expected arg 'onClose' to exist";
+ this.onClose = onClose;
}
@Override
- public void close() throws IOException {
+ public void close() throws java.io.IOException {
if (!isFired.getAndSet(true)) {
onClose.run();
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ConcatInputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ConcatInputStream.java
index 8c89fc0..656e28c 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ConcatInputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/ConcatInputStream.java
@@ -13,6 +13,9 @@ public class ConcatInputStream extends InputStream {
private int iSrc;
public ConcatInputStream( InputStream... sources ){
+ for( InputStream src : sources ){
+ assert src != null : "Why do you pass a 'null' source here?";
+ }
this.sources = sources;
this.iSrc = 0;
}
@@ -30,7 +33,7 @@ public class ConcatInputStream extends InputStream {
InputStream src = sources[iSrc];
int readLen = src.read(b, off + copied, len - copied);
if( readLen < 0 ){
- assert readLen == -1;
+ assert readLen == -1 : "InputStream.read() MUST NOT return "+ readLen +". RTFM!";
// Source drained. Continue read with next source.
shiftToNextSource();
continue;
@@ -50,7 +53,7 @@ public class ConcatInputStream extends InputStream {
}
return read;
}
- return -1;
+ return -1; /* EOF */
}
@Override
@@ -60,10 +63,11 @@ public class ConcatInputStream extends InputStream {
for( int i = iSrc ; i < sources.length ; ++i ){
try{
sources[i].close();
+ sources[i] = null; /* allow GC */
}catch( IOException|RuntimeException ex ){
if( firstException == null ){
- // Track the exception. But we have to close the
- // remaining streams regardless of early exceptions.
+ // Track exception. But we have to close the remaining
+ // streams regardless of early exceptions.
firstException = ex;
}else if( firstException != ex ){
firstException.addSuppressed(ex);
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/EmptyGzipInputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/EmptyGzipInputStream.java
index e2e908c..8d3d0f7 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/EmptyGzipInputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/EmptyGzipInputStream.java
@@ -1,14 +1,11 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-
/**
- * Same idea as {@link InputStream#nullInputStream()} but serving an empty gzip
+ * Same idea as {@link java.io.InputStream#nullInputStream()} but serving an empty gzip
* instead.
*/
-public class EmptyGzipInputStream extends ByteArrayInputStream {
+public class EmptyGzipInputStream extends java.io.ByteArrayInputStream {
private static final byte[] EMPTY_GZIP = {
0x1F, (byte) 0x8B, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // gzip header
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseInputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseInputStream.java
index 8a487ab..0a2c72c 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseInputStream.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseInputStream.java
@@ -1,21 +1,15 @@
package ch.hiddenalpha.unspecifiedgarbage.octetstream;
-import java.io.FilterInputStream;
-import java.io.InputStream;
-
/**
- * Suppresses to close the underlying stream when close gets called.
+ * <p>Suppresses to close the underlying stream when close gets called.</p>
*/
-public class IgnoreCloseInputStream extends FilterInputStream {
+public class IgnoreCloseInputStream extends java.io.FilterInputStream {
- public IgnoreCloseInputStream( InputStream in ) {
+ public IgnoreCloseInputStream( java.io.InputStream in ) {
super(in);
}
- @Override
- public void close() {
- // NOOP. Do NOT close the stream.
- }
+ @Override public void close() {/*no-op*/}
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseOutputStream.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseOutputStream.java
new file mode 100644
index 0000000..d17172a
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/octetstream/IgnoreCloseOutputStream.java
@@ -0,0 +1,31 @@
+package ch.hiddenalpha.unspecifiedgarbage.octetstream;
+
+
+/**
+ * <p>Suppresses to close the underlying {@link java.io.OutputStream} when
+ * close gets called.</p>
+ *
+ * <p>This can be needed for example we get a outputStream passed from caller
+ * and we have to pass it further down to another callee. Which may close the
+ * passed stream. But this is unlucky if our caller (or we ourself) needs to
+ * continue writing to the original stream after we have completed writing what
+ * we were supposed to write.</p>
+ *
+ * <p>For example imagine we're creating a tar archive and pass our
+ * outputStream down to some source which will write a tar entries payload to
+ * the stream and in the end closes the sink. This would make it impossible for
+ * us to write any more entries to that stream.</p>
+ *
+ * <p>WARN: Think before using this filter! Blindly using it without
+ * understanding what this is for, you risk to produce resource-leaks.</p>
+ */
+public class IgnoreCloseOutputStream extends java.io.FilterOutputStream {
+
+ public IgnoreCloseOutputStream(java.io.OutputStream out) {
+ super(out);
+ }
+
+ @Override public void close() {/*no-op*/}
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/paisa/DomainValidation.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/paisa/DomainValidation.java
new file mode 100644
index 0000000..a569f53
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/paisa/DomainValidation.java
@@ -0,0 +1,8 @@
+package ch.hiddenalpha.unspecifiedgarbage.paisa;
+
+
+public class DomainValidation {
+
+
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/shell/ShellUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/shell/ShellUtils.java
index 6c903ec..d6b4dbd 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/shell/ShellUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/shell/ShellUtils.java
@@ -30,16 +30,11 @@ public class ShellUtils {
* String path = "pâth \\with ${MustNotResolveThisVar} and a|so €vil chars like spaces, p|pes or * asterisk";<br/>
* cmd = "ls '"+ escapeForSnglQuotEverything(path) +"'";<br/>
* </code>
+ * <p>For a more detailed explanation see <a
+ * href="https://hiddenalpha.ch/slnk/id/1-ea62ea0b8635c39#f4a94246c53735a69">How
+ * to escape shell commands</a>.</p>
*/
public static String escapeForSingleQuotEverything( String s ){
- // Cited from "Single-Quotes" in "https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html":
- // Enclosing characters in single-quotes shall preserve the literal
- // value of each character within the single-quotes. A single-quote
- // cannot occur within single-quotes.
- // Cited from "Double-Quotes" in "https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html":
- // Enclosing characters in double-quotes ( "" ) shall preserve the
- // literal value of all characters within the double-quotes, with the
- // exception of the characters dollar sign, backquote, and backslash
return s.replace("'", "'\"'\"'");
}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
index 7d712ee..889b3f1 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
@@ -1,14 +1,5 @@
package ch.hiddenalpha.unspecifiedgarbage.stream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Iterator;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.function.Function;
-import java.util.function.Predicate;
-
public class StreamUtils {
@@ -23,10 +14,11 @@ public class StreamUtils {
* Count of copied bytes.
*/
public static long copy( java.io.InputStream is, java.io.OutputStream os ) throws java.io.IOException {
- byte[] buffer = new byte[8192];
+ byte[] buffer = new byte[1<<14];
long totalBytes = 0;
- int readLen;
- while( -1 != (readLen=is.read(buffer,0,buffer.length)) ){
+ while( true ){
+ int readLen = is.read(buffer, 0, buffer.length);
+ if( readLen == -1 ) break; /* EOF */
totalBytes += readLen;
os.write(buffer, 0, readLen);
}
@@ -34,18 +26,18 @@ public class StreamUtils {
}
public static <SRC,DST> java.util.Iterator<DST> map( java.util.Iterator<SRC> src , java.util.function.Function<SRC,DST> mapper ) {
- return new Iterator<DST>() {
+ return new java.util.Iterator<DST>() {
@Override public boolean hasNext() { return src.hasNext(); }
@Override public DST next() { return mapper.apply(src.next()); }
};
}
- public static <T> Predicate<T> distinctBy(Function<? super T, ?> keyExtractor) {
- Set<Object> seen = ConcurrentHashMap.newKeySet();
+ public static <T> java.util.function.Predicate<T> distinctBy( java.util.function.Function<? super T, ?> keyExtractor ) {
+ java.util.Set<Object> seen = java.util.concurrent.ConcurrentHashMap.newKeySet();
return t -> seen.add(keyExtractor.apply(t));
}
- public static <T> Predicate<T> not( Predicate<T> p ){
+ public static <T> java.util.function.Predicate<T> not( java.util.function.Predicate<T> p ){
return e -> !p.test(e);
}
diff --git a/src/main/lua/maven/FindLatestPaisaArtifacts.lua b/src/main/lua/maven/FindLatestPaisaArtifacts.lua
new file mode 100644
index 0000000..62b583c
--- /dev/null
+++ b/src/main/lua/maven/FindLatestPaisaArtifacts.lua
@@ -0,0 +1,254 @@
+
+local newHttpClient = require("scriptlee").newHttpClient
+local newTlsClient = assert(require("scriptlee").newTlsClient)
+local objectSeal = require("scriptlee").objectSeal
+local sleep = require("scriptlee").posix.sleep
+local startOrExecute = require("scriptlee").reactor.startOrExecute
+
+local out, log = io.stdout, io.stderr
+local mod = {}
+local LOGDBG = (true)and(function(msg)log:write("[DEBUG] "..msg)end)or(function()end)
+
+
+function mod.printHelp()
+ out:write("\n"
+ .." List latest PAISA maven artifacts\n"
+ .."\n"
+ .." Options:\n"
+ .."\n"
+ .." --yolo\n"
+ .." WARN: only use if you know what you're doing!\n"
+ .."\n"
+ .."\n")
+end
+
+
+function mod.parseArgs( app )
+ local iA = 0
+ local isYolo = false
+::nextArg::
+ iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ goto endOfArgs
+ elseif arg == "--help" then
+ mod.printHelp() return -1
+ elseif arg == "--yolo" then
+ isYolo = true
+ else
+ log:write("Unexpected arg: "..tostring(arg).."\n")return -1
+ end
+ goto nextArg
+::endOfArgs::
+ if not isYolo then log:write("Bad Args\n") return -1 end
+ return 0
+end
+
+
+function mod.compareVersion(a, b)
+ local semverFmt = "^(%d+)%.(%d+)%.(%d+)"
+ local gagaFmt = "^(%d+)%.(%d+)%.(%d+)%.(%d+)"
+ -- parse
+ local amaj, amin, apat, abui = a:match(semverFmt)
+ if not amaj then amaj, amin, apat, abui = a:match(gagaFmt) end
+ local bmaj, bmin, bpat, bbui = b:match(semverFmt)
+ if not bmaj then bmaj, bmin, bpat, bbui = b:match(gagaFmt) end
+ -- compare
+ --LOGDBG("CMP "..tostring(a).." VS "..tostring(b).."\n")
+ local diff = amaj - bmaj
+ if diff ~= 0 then return diff end
+ diff = amin - bmin
+ if diff ~= 0 then return diff end
+ diff = apat - bpat
+ if diff ~= 0 then return diff end
+ if abui and not bbui then return 1 end
+ if not abui and bbui then return -1 end
+ if not abui and not bbui then return 0 end
+ diff = abui - bbui
+ if diff ~= 0 then return diff end
+ return 0
+end
+
+
+function mod.newWebDirListParser( app, opts )
+ local cb_cls = opts.cls
+ local cb_onEntry = opts.onEntry
+ local cb_onEnd = opts.onEnd
+ opts = nil
+ local t = {
+ collected = {},
+ }
+ local m = {
+ write = function( t, buf )
+ table.insert(t.collected, buf)
+ end,
+ closeSnk = function( t )
+ local buf = table.concat(t.collected)
+ local iter = buf:gmatch('<a href%="([^"]+)">[^<]+</a> +%d+%-%a+%-%d+ %d+:%d+[^\n]+\n')
+ for aid,_ in iter do
+ cb_onEntry(aid, cb_cls)
+ end
+ cb_onEnd(cb_cls)
+ end,
+ __index = false,
+ } m.__index = m
+ return setmetatable(t, m)
+end
+
+
+function mod.onMvnPomFoundInArtifactory( app, path )
+ local gid, aid, ver, aid, ver = path:match(
+ "^/artifactory/paisa/(.+)/([^/]+)/([^/]+)/([^/]+)-([^/]+).pom")
+ if not gid or not aid or not ver then
+ log:write("input: \"".. path .."\"\n")
+ error("Failed to extract artifact identity")
+ end
+ gid = gid:gsub('/', '.')
+ out:write("r;".. gid ..";".. aid ..";".. ver .."\n")
+end
+
+
+function mod.onArtifactSubdirFoundInArtifactory( app, path )
+ assert(not path:find("/$"), path)
+ path = path .."/"
+ --if path:len() > 37 and not path:find("^/artifactory/paisa/ch/post/it/paisa/preflux") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/[^/]+/[^-]+%-config/") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/[^/]+/[^-]+%-domain/") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/[^/]+/[^-]+%-test/") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/alice") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/aseed") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/data/resources/paisa%-data%-resources%-") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/data/transformer/") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/paisa-devpack") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/paisa-pom") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/paisa-superpom") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/tragula") then goto skipThisPath end
+ if path:find("^/artifactory/paisa/ch/post/it/paisa/tyro") then goto skipThisPath end
+ goto doHttpRequest
+::skipThisPath::
+ --LOGDBG("ignore '".. path .."'\n")
+ if true then return end
+::doHttpRequest::
+ local req = objectSeal{
+ base = false,
+ rspStatus = false,
+ rspParser = false,
+ listOfChilds = false,
+ }
+ local reqMethod = "GET"
+ --LOGDBG(reqMethod .." ".. path .."\n")
+ req.base = app.http:request{
+ cls = req,
+ host = app.artifactoryInaddr,
+ port = app.artifactoryPort,
+ method = reqMethod,
+ url = path,
+ onRspHdr = function( rsp, req )
+ req.rspStatus = rsp.status
+ if req.rspStatus ~= 200 then
+ log:write("REQ ".. reqMethod .." ".. path .."\n")
+ log:write("RSP ".. rsp.proto .." ".. rsp.status .." ".. rsp.phrase .."\n")
+ for _,hdr in ipairs(rsp.headers) do
+ log:write("RSP ".. hdr[1] ..": ".. hdr[2] .."\n")
+ end
+ log:write("RSP \n")
+ end
+ req.listOfChilds = {}
+ end,
+ onRspChunk = function( buf, req )
+ if req.rspStatus ~= 200 then
+ log:write("RSP ".. buf:gsub('\n', '\nRSP ') ..'\n')
+ return
+ end
+ req.rspParser:write(buf)
+ end,
+ onRspEnd = function()
+ if req.rspStatus ~= 200 then return end
+ req.rspParser:closeSnk()
+ req.rspStatus = "OK"
+ end,
+ }
+ req.rspParser = mod.newWebDirListParser(app, {
+ cls = req,
+ onEntry = function( e, req ) table.insert(req.listOfChilds, e) end,
+ onEnd = function( req ) end,
+ })
+ local ok, emsg = pcall(req.base.closeSnk, req.base)
+ if not ok then
+ if emsg:find("^ENOMSG") then -- No idea why artifactory does this sometimes
+ log:write(tostring(emsg).."\n")
+ log:write("sleep(7) ...\n"); sleep(7)
+ goto doHttpRequest
+ end
+ error(emsg)
+ end
+ if req.rspStatus ~= "OK" then error("Unexpected response") end
+ -- Cleanup childs
+ local old, childs, poms
+ old = req.listOfChilds
+ childs = {}
+ poms = {}
+ for _,child in pairs(req.listOfChilds) do
+ if child:find("^%d+%.%d+%.%d+%-") then goto skipThisChild end -- Skip pre-release (semver)
+        if child:find("^%d+%.%d+%.%d+%.%d+%-") then goto skipThisChild end -- Skip pre-release (4-component version format)
+ if child:find("/$") then
+ table.insert(childs, child)
+ goto nextChild
+ end
+ if child:find("%.pom$") then
+ --LOGDBG("Keep \""..tostring(child).."\" (type=POM)\n")
+ table.insert(poms, child)
+ goto nextChild
+ end
+ ::skipThisChild::
+ --LOGDBG("Skip \""..tostring(child).."\"\n")
+ ::nextChild::
+ end
+    -- Drop obsolete releases: find versions, sort them, and keep only the
+    -- few that look recent enough.
+ local versionAlike = {}
+ local iC = 0
+ while iC < #childs do iC = iC + 1 -- Collect things that look like versions
+ local child = childs[iC]
+ if child:find("^%d+%.%d+%.%d+") or child:find("^%d+%.%d+%.%d+%.%d+") then
+ table.remove(childs, iC)
+ table.insert(versionAlike, child)
+ iC = iC - 1
+ end
+ end
+ table.sort(versionAlike, function(a, b) return mod.compareVersion(b, a) < 0 end)
+ iC = 0
+ for k, v in ipairs(versionAlike) do
+ iC = iC + 1; if iC > 1 then break end -- add only a few
+ --LOGDBG("Keep \""..tostring(versionAlike[iC]).."\"\n")
+ table.insert(childs, versionAlike[iC])
+ end
+ -- Process POMs
+ for _, pom in pairs(poms) do
+ mod.onMvnPomFoundInArtifactory(app, path .. pom)
+ end
+ -- Process childs
+ for _,child in pairs(childs) do
+ mod.onArtifactSubdirFoundInArtifactory(app, path .. child:gsub('/$',''))
+ end
+end
+
+
+function mod.run( app )
+ mod.onArtifactSubdirFoundInArtifactory(app, "/artifactory/paisa/ch/post/it/paisa")
+end
+
+
+function mod.main()
+ local app = objectSeal{
+ http = newHttpClient{},
+ artifactoryInaddr = "artifactory.pnet.ch",
+ artifactoryPort = 443,
+ }
+ if mod.parseArgs(app) ~= 0 then os.exit(1) end
+ mod.run(app)
+end
+
+
+startOrExecute(mod.main)
+
diff --git a/src/main/lua/maven/MvnCentralDepScan.lua b/src/main/lua/maven/MvnCentralDepScan.lua
index 0ea449f..5322bc0 100644
--- a/src/main/lua/maven/MvnCentralDepScan.lua
+++ b/src/main/lua/maven/MvnCentralDepScan.lua
@@ -20,6 +20,7 @@ local startOrExecute = require("scriptlee").reactor.startOrExecute
local out, log = io.stdout, io.stderr
local mod = {}
+local LOGDBG = (false)and(function(msg)log:write("[DEBUG] "..msg)end)or(function()end)
function mod.printHelp()
@@ -28,19 +29,31 @@ function mod.printHelp()
.."\n"
.." Options:\n"
.."\n"
- .." --example\n"
- .." WARN: only use if you know what you're doing!\n"
+ .." --export-artifacts\n"
+ .." Export all known artifacts as CSV to stdout.\n"
.."\n"
- .." --sqliteOut <path>\n"
- .." Path where to export the result.\n"
+ .." --export-parents\n"
+ .." Export all known parent relations as CSV to stdout.\n"
.."\n"
+ .." --export-parents-latest\n"
+ .." Export parent relations only of the latest known releases as CSV\n"
+ .." to stdout.\n"
+ .."\n"
+ .." --export-deps\n"
+ .." Export all known dependency relations as CSV to stdout.\n"
+ .."\n"
+ .." --export-deps-latest\n"
+ .." Export dependency relations only of the latest known releases as\n"
+ .." CSV to stdout.\n"
+ .."\n"
+ .." --state <path>\n"
+ .." Data file to use for this operation.\n"
.."\n")
end
function mod.parseArgs( app )
local iA = 0
- local isExample = false
::nextArg::
iA = iA + 1
local arg = _ENV.arg[iA]
@@ -48,44 +61,84 @@ function mod.parseArgs( app )
goto endOfArgs
elseif arg == "--help" then
mod.printHelp() return -1
- elseif arg == "--example" then
- isExample = true
- elseif arg == "--sqliteOut" then
+ elseif arg == "--export-artifacts" then
+ app.operation = "export-artifacts"
+ elseif arg == "--export-parents" then
+ app.operation = "export-parents"
+ elseif arg == "--export-parents-latest" then
+ app.operation = "export-parents-latest"
+ elseif arg == "--export-deps" then
+ app.operation = "export-deps"
+ elseif arg == "--export-deps-latest" then
+ app.operation = "export-deps-latest"
+ elseif arg == "--yolo" then
+ app.isYolo = true
+ elseif arg == "--state" then
iA = iA + 1
arg = _ENV.arg[iA]
if not arg then log:write("Arg --sqliteOut needs value\n")return-1 end
- app.sqliteOutFile = arg
+ app.sqliteFile = arg
else
log:write("Unexpected arg: "..tostring(arg).."\n")return -1
end
goto nextArg
::endOfArgs::
- if not isExample then log:write("Bad Args\n") return -1 end
+ if not app.sqliteFile then log:write("Arg --state missing\n") return-1 end
+ if not app.isYolo and not app.operation then log:write("Bad Args\n") return -1 end
return 0
end
-function mod.newPomUrlSrc( app )
- local urls = {
- -- TODO insert URLs here!
- }
- local m = {
- nextPomUrl = function(t)
- return table.remove(urls, 1)
- end,
- __index = false,
- }
- m.__index = m
- return setmetatable({}, m)
+function mod.strTrim( str )
+ if not str then return str end
+ return str:gsub("^%s+", ""):gsub("%s+$", "")
+end
+
+
+function mod.compareVersion(a, b)
+ local semverFmt = "^(%d+)%.(%d+)%.(%d+)"
+ local gagaFmt = "^(%d+)%.(%d+)%.(%d+)%.(%d+)"
+ local fuckitFmt = "^(.*)$"
+ local isGaga, isFuckit = false, false
+ -- parse
+ local amaj, amin, apat, abui = a:match(semverFmt)
+ if not amaj then amaj, amin, apat, abui = a:match(gagaFmt); isGaga=true end
+ if not amaj then isFuckit=true end
+ local bmaj, bmin, bpat, bbui = b:match(semverFmt)
+ if not bmaj then bmaj, bmin, bpat, bbui = b:match(gagaFmt) isGaga=true end
+ if not bmaj then isFuckit=true end
+ if isFuckit then isGaga = false end -- reset
+ -- compare
+ --LOGDBG("CMP "..tostring(a).." VS "..tostring(b).."\n")
+ local diff
+    if isFuckit then -- fall back to plain string comparison for unparseable version formats.
+ if a > b then return 1 end
+ if a < b then return -1 end
+ return 0
+ end
+ diff = amaj - bmaj
+ if diff ~= 0 then return diff end
+ diff = amin - bmin
+ if diff ~= 0 then return diff end
+ diff = apat - bpat
+ if diff ~= 0 then return diff end
+ if abui and not bbui then return 1 end
+ if not abui and bbui then return -1 end
+ if not abui and not bbui then return 0 end
+ diff = abui - bbui
+ if diff ~= 0 then return diff end
+ return 0
end
function mod.processXmlValue( pomParser )
- local app = pomParser.req.app
- local xpath = ""
+ local app = pomParser.app
+ local xpath = {}
for i, stackElem in ipairs(pomParser.xmlElemStack) do
- xpath = xpath .."/".. stackElem.tag
+ table.insert(xpath, "/")
+ table.insert(xpath, stackElem.tag)
end
+ xpath = table.concat(xpath)
--log:write(xpath .."\n")
local mvnArtifact = pomParser.mvnArtifact
local newMvnDependency = function()return objectSeal{
@@ -122,6 +175,10 @@ function mod.processXmlValue( pomParser )
pomParser.mvnDependency = false
local deps = app.mvnDepsByArtifact[mvnArtifact]
if not deps then deps = {} app.mvnDepsByArtifact[mvnArtifact] = deps end
+ -- need to trim values
+ mvnArtifact.groupId = mod.strTrim(mvnArtifact.groupId)
+ mvnArtifact.artifactId = mod.strTrim(mvnArtifact.artifactId)
+ mvnArtifact.version = mod.strTrim(mvnArtifact.version)
table.insert(deps, assert(mvnDependency))
elseif xpath == "/project/dependencyManagement/dependencies/dependency/groupId" then
if not pomParser.mvnMngdDependency then pomParser.mvnMngdDependency = newMvnDependency() end
@@ -163,18 +220,32 @@ function mod.getMvnArtifactKey( mvnArtifact )
end
-function mod.onGetPomRspHdr( msg, req )
- log:write("< "..tostring(msg.proto) .." "..tostring(msg.status).." "..tostring(msg.phrase).."\n")
- --for i, h in ipairs(msg.headers) do
- -- log:write("< ".. h.key ..": ".. h.val .."\n")
- --end
- --log:write("< \n")
- if msg.status ~= 200 then
- error("Unexpected HTTP ".. tostring(msg.status))
+function mod.onMvnArtifactThatShouldBeFetched( app, mvnArtifact )
+ assert(type(mvnArtifact.artifactId) == "string")
+ assert(type(mvnArtifact.version) == "string")
+ assert(type(mvnArtifact.groupId) == "string")
+ local key = mod.getMvnArtifactKey(mvnArtifact)
+ if app.mvnArtifactsNotFound[key] then
+ LOGDBG("do NOT enqueue bcause 404: ".. mvnArtifact.artifactId .." ".. mvnArtifact.version .."\n")
+ return
end
- assert(not req.pomParser)
- req.pomParser = objectSeal{
- req = req,
+ if app.mvnArtifactsAlreadyParsed[key] then
+ LOGDBG("do NOT enqueue bcause have already: ".. mvnArtifact.artifactId .." ".. mvnArtifact.version .."\n")
+ return
+ end
+ app.mvnArtifactsAlreadyParsed[key] = true -- TODO maybe should do this in another place
+ table.insert(app.nextUrlsToFetch, {
+ artifactId = mvnArtifact.artifactId,
+ version = mvnArtifact.version,
+ groupId = mvnArtifact.groupId,
+ })
+end
+
+
+function mod.newPomParser( app, cls )
+ local pomParser = objectSeal{
+ app = app,
+ outerCls = cls,
base = false,
xmlElemStack = {},
currentValue = false,
@@ -191,47 +262,253 @@ function mod.onGetPomRspHdr( msg, req )
write = function( t, buf ) t.base:write(buf) end,
closeSnk = function( t, buf ) t.base:closeSnk() end,
}
- req.pomParser.base = newXmlParser{
- cls = req.pomParser,
+ pomParser.base = newXmlParser{
+ cls = pomParser,
onElementBeg = function( tag, pomParser )
table.insert(pomParser.xmlElemStack, { tag = tag, })
pomParser.currentValue = false
end,
onElementEnd = function( tag, pomParser )
+ if type(pomParser.currentValue) == "table" then
+ pomParser.currentValue = table.concat(pomParser.currentValue)
+ end
mod.processXmlValue(pomParser)
local elem = table.remove(pomParser.xmlElemStack)
assert(elem.tag == tag);
end,
onChunk = function( buf, pomParser )
- if pomParser.currentValue then
- pomParser.currentValue = pomParser.currentValue .. buf
+ if type(pomParser.currentValue) ~= "table" then
+ pomParser.currentValue = { buf }
else
- pomParser.currentValue = buf
+ table.insert(pomParser.currentValue, buf)
end
end,
onEnd = function( pomParser )
assert(#pomParser.xmlElemStack == 0)
- local req = pomParser.req
- local app = req.app
+ local app = pomParser.app
local mvnArtifact = pomParser.mvnArtifact
pomParser.mvnArtifact = false
if not mvnArtifact.groupId then mvnArtifact.groupId = mvnArtifact.parentGroupId end
if not mvnArtifact.version then mvnArtifact.version = mvnArtifact.parentVersion end
local key = mod.getMvnArtifactKey(mvnArtifact)
- assert(not app.mvnArtifacts[key])
+ if app.mvnArtifacts[key] then
+ log:write("[WARN ] Already have aid=".. mvnArtifact.artifactId
+ ..", ver=".. mvnArtifact.version ..", gid=".. mvnArtifact.groupId .."\n")
+ return
+ end
app.mvnArtifacts[key] = mvnArtifact
+ table.insert(app.taskQueue, function()
+ mod.onNewArtifactGotFetched(app, mvnArtifact)
+ end)
end,
}
+ return pomParser
+end
+
+
+function mod.fetchMvnArtifactFromFileOrElseSrcNr( app, mvnArtifact, repoDir, pomSrcNrFallback )
+ local path = repoDir
+ .."/".. mvnArtifact.groupId:gsub('%.','/')
+ .."/".. mvnArtifact.artifactId
+ .."/".. mvnArtifact.version
+ .."/".. mvnArtifact.artifactId .."-".. mvnArtifact.version
+ ..".pom"
+ local fd = io.open(path, "rb")
+ if not fd then
+ --LOGDBG("ENOENT ".. path .."\n")
+ mod.fetchFromSourceNr(app, mvnArtifact, pomSrcNrFallback)
+ return
+ end
+ --log:write("fopen(\"".. path .."\", \"rb\")\n")
+ local file = objectSeal{
+ base = false,
+ pomParser = false,
+ }
+ file.pomParser = mod.newPomParser(app, false)
+ while true do
+ local buf = fd:read(1<<16)
+ if not buf then break end
+ local ok, emsg = pcall(file.pomParser.write, file.pomParser, buf)
+ if not ok then
+ if emsg:find("^%[[^]]+%]:%d+: XMLParseError .+ unknown encoding") then
+ log:write("[ERROR] Ignore: ".. emsg .."\n")
+ sleep(3)
+ return
+ end
+ error(emsg)
+ end
+ end
+ file.pomParser:closeSnk()
+end
+
+
+function mod.fetchMvnArtifactFromWebserverOrElseSrcNr( app, mvnArtifact, baseUrl, pomSrcNrFallback )
+ local aid = assert(mvnArtifact.artifactId)
+ local ver = assert(mvnArtifact.version)
+ local gid = assert(mvnArtifact.groupId)
+ local pomUrl = baseUrl .."/paisa/".. gid:gsub('%.','/')
+ .."/".. aid .."/".. ver .."/".. aid .."-".. ver ..".pom"
+ local proto = pomUrl:match("^(https?)://")
+ local isTLS = (proto:upper() == "HTTPS")
+ local host = pomUrl:match("^https?://([^:/]+)[:/]")
+ local port = pomUrl:match("^https?://[^:/]+:(%d+)[^%d]")
+ local url = pomUrl:match("^https?://[^/]+(.*)$")
+ if port == 443 then isTLS = true end
+ if not port then port = (isTLS and 443 or 80) end
+::doHttpRequest::
+ log:write("> GET ".. proto .."://".. host ..":".. port .. url .."\n")
+ local req = objectSeal{
+ app = app,
+ base = false,
+ pomParser = false,
+ artifactId = aid, -- so we know what we're trying to fetch
+ version = ver, -- so we know what we're trying to fetch
+ groupId = gid, -- so we know what we're trying to fetch
+ }
+ req.base = app.http:request{
+ cls = req,
+ host = assert(host), port = assert(port),
+ method = "GET", url = url,
+ --hdrs = ,
+ useTLS = isTLS,
+ onRspHdr = mod.onGetPomRspHdr,
+ onRspChunk = function( buf, req ) if req.pomParser then req.pomParser:write(buf) end end,
+ onRspEnd = function( req ) if req.pomParser then req.pomParser:closeSnk() end end,
+ }
+ local ok, emsg = pcall(req.base.closeSnk, req.base)
+ if not ok then
+ if emsg:find("^ENOMSG") then
+ log:write("ENOMSG Artifactory closed connection appruptly?!? Retry in a few seconds ....\n"); sleep(7)
+ goto doHttpRequest
+ end
+ error(emsg)
+ end
+end
+
+
+function mod.fetchFromSourceNr( app, mvnArtifact, pomSrcNr )
+ if not pomSrcNr then pomSrcNr = 1 end
+ local pomSrc = app.pomSources[pomSrcNr]
+ if not pomSrc then
+ mod.onNoMorePomSources(app, mvnArtifact)
+ end
+ -- pom source ready to use
+ if false then
+ elseif pomSrc.type == "local-file-cache" then
+ --LOGDBG("Fetch from local file cache\n")
+ mod.fetchMvnArtifactFromFileOrElseSrcNr(app, mvnArtifact, pomSrc.repoDir, pomSrcNr + 1)
+ elseif pomSrc.type == "webserver" then
+ --LOGDBG("Fetch from webserver\n")
+ mod.fetchMvnArtifactFromWebserverOrElseSrcNr(app, mvnArtifact, pomSrc.baseUrl, pomSrcNr + 1)
+ else
+ error("Whops. pomSources[i].type is ".. pomSrc.type)
+ end
end
-function mod.onGetPomRspChunk( buf, req )
- req.pomParser:write(buf)
+function mod.fetchAnotherMvnArtifact( app, pomSrcNr )
+::findNextArtifactToFetch::
+ local mvnArtifact
+ mvnArtifact = table.remove(app.currentUrlsToFetch, 1)
+ if not mvnArtifact then
+ assert(#app.currentUrlsToFetch == 0)
+ if #app.nextUrlsToFetch > 0 then -- switch to next set to process
+ --LOGDBG("currentUrlsToFetch drained. Continue with nextUrlsToFetch\n")
+ app.currentUrlsToFetch = app.nextUrlsToFetch
+ app.nextUrlsToFetch = {}
+ goto findNextArtifactToFetch
+ end
+ return
+ end
+ assert(mvnArtifact)
+ mod.fetchFromSourceNr(app, mvnArtifact, 1)
+end
+
+
+function mod.onMvnArtifactThatShouldBeScannedForDeps( app, mvnArtifact )
+ assert(type(mvnArtifact.artifactId) == "string")
+ assert(type(mvnArtifact.version) == "string")
+ assert(type(mvnArtifact.groupId) == "string")
+end
+
+
+function mod.enqueueMissingDependencies( app, mvnArtifact )
+ local hasParent = (not not mvnArtifact.parentArtifactId)
+ if hasParent then
+ local parentKey = mod.getMvnArtifactKey({
+ artifactId = mvnArtifact.parentArtifactId,
+ version = mvnArtifact.parentVersion,
+ groupId = mvnArtifact.parentGroupId,
+ })
+ local parent = app.mvnArtifacts[parentKey]
+ if not parent then
+ --LOGDBG("Enqueue parent: aid=".. mvnArtifact.parentArtifactId
+ -- .." v=".. mvnArtifact.parentVersion.." gid=".. mvnArtifact.parentGroupId .."\n")
+ mod.onMvnArtifactThatShouldBeFetched(app, objectSeal{
+ artifactId = mvnArtifact.parentArtifactId,
+ version = mvnArtifact.parentVersion,
+ groupId = mvnArtifact.parentGroupId,
+ })
+ end
+ end
+ local deps = app.mvnDepsByArtifact[mvnArtifact]
+ if not deps then
+ --LOGDBG("Has no dependencies: aid="..tostring(mvnArtifact.artifactId)
+ -- ..", ver="..tostring(mvnArtifact.version)..", gid="..tostring(mvnArtifact.groupId).."\n")
+ return
+ end
+ for _,dep in pairs(deps) do
+ local isIncomplete = (not dep.artifactId or not dep.version or not dep.groupId)
+ local hasUnresolvedMvnProps = false
+ if not isIncomplete then
+ hasUnresolvedMvnProps = (dep.artifactId:find("${",0,true) or dep.version:find("${",0,true) or dep.groupId:find("${",0,true))
+ end
+ if isIncomplete or hasUnresolvedMvnProps then
+ --LOGDBG("Incomplete. Give up aid="..tostring(dep.artifactId)
+ -- .." v="..tostring(dep.version).." gid="..tostring(dep.groupId).."\n")
+ else
+ --LOGDBG("Enqueue dependency: aid="..tostring(dep.artifactId)
+ -- .." v="..tostring(dep.version).." gid="..tostring(dep.groupId).."\n")
+ --if dep.artifactId:find("\n") or dep.version:find("\n") or dep.groupId:find("\n") then
+ -- log:write("Wanted by aid="..tostring(mvnArtifact.artifactId)
+ -- ..", ver="..tostring(mvnArtifact.version)
+ -- ..", gid="..tostring(mvnArtifact.groupId).."\n");
+ -- error("WTF?!?")
+ --end
+ mod.onMvnArtifactThatShouldBeFetched(app, objectSeal{
+ artifactId = dep.artifactId,
+ version = dep.version,
+ groupId = dep.groupId,
+ })
+ end
+ end
+end
+
+
+function mod.onGetPomRspHdr( msg, req )
+ local app = req.app
+ if msg.status == 404 then
+ log:write("HTTP 404. Ignore aid='".. req.artifactId .."' v='".. req.version .."' gid='".. req.groupId .."'.\n")
+ local key = assert(mod.getMvnArtifactKey(req))
+ app.mvnArtifactsNotFound[key] = true
+ return
+ elseif msg.status ~= 200 then
+ log:write("< "..tostring(msg.proto) .." "..tostring(msg.status).." "..tostring(msg.phrase).."\n")
+ for i, h in ipairs(msg.headers) do
+ log:write("< ".. h[1] ..": ".. h[2] .."\n")
+ end
+ log:write("< \n")
+ error("Unexpected HTTP ".. tostring(msg.status))
+ end
+ assert(not req.pomParser)
+ req.pomParser = mod.newPomParser(app, req)
end
-function mod.onGetPomRspEnd( req )
- req.pomParser:closeSnk()
+function mod.onNewArtifactGotFetched( app, mvnArtifact )
+ mod.resolveProperties(app) -- TODO IMHO we shouldn't call that so often
+ mod.resolveDependencyVersionsFromDepsMgmnt(app) -- TODO IMHO we shouldn't call that so often
+ mod.enqueueMissingDependencies(app, mvnArtifact)
end
@@ -241,34 +518,44 @@ function mod.resolveDependencyVersionsFromDepsMgmnt( app )
local mvnMngdDepsByArtifact = app.mvnMngdDepsByArtifact
local funcs = {}
function funcs.resolveForDependency( mvnArtifact, mvnDependency )
+ assert(mvnArtifact)
+ assert(mvnDependency)
+ --LOGDBG("resolveForDependency(".. mvnArtifact.artifactId .."-".. mvnArtifact.version ..", "
+ -- .. mvnDependency.artifactId .."-"..tostring(mvnDependency.version)..")\n")
+        -- Nothing to do if it's already set
if mvnDependency.version then return end
+ -- Do we have deps management available?
local mngdDeps = mvnMngdDepsByArtifact[mvnArtifact]
- if not mngdDeps then return end
- for _, mngdDep in pairs(mngdDeps) do
- if mvnDependency.groupId == mngdDep.groupId
- and mvnDependency.artifactId == mngdDep.artifactId
- then
- mvnDependency.version = assert(mngdDep.version);
- break
+ if mngdDeps then
+            -- Look up our own deps management for a version
+ for _, mngdDep in pairs(mngdDeps) do
+ if mvnDependency.groupId == mngdDep.groupId
+ and mvnDependency.artifactId == mngdDep.artifactId
+ then
+ -- Version found :)
+ mvnDependency.version = assert(mngdDep.version);
+ return
+ end
end
end
- local hasParent = (mvnArtifact.parentArtifactId)
- if not mvnDependency.version and hasParent then
- -- Cannot resolve. Delegate to parent.
- local parent = mvnArtifacts[mod.getMvnArtifactKey{
+ assert(not mvnDependency.version)
+ -- no deps management? Maybe parent has?
+ local parent
+ if mvnArtifact.parentArtifactId then -- has its parent declared
+ parent = mvnArtifacts[mod.getMvnArtifactKey{
groupId = mvnArtifact.parentGroupId,
artifactId = mvnArtifact.parentArtifactId,
version = mvnArtifact.parentVersion,
}];
- if parent then
- funcs.resolveForDependency(parent, mvnDependency)
- end
+ end
+ if parent then -- parent exists
+ funcs.resolveForDependency(parent, mvnDependency)
end
end
function funcs.resolveForArtifact( mvnArtifact )
+ --LOGDBG("resolveForArtifact("..mvnArtifact.artifactId..", ".. mvnArtifact.version ..")\n")
local mvnDeps = mvnDepsByArtifact[mvnArtifact]
if not mvnDeps then return end
- if not mvnMngdDepsByArtifact[mvnArtifact] then return end
for _, mvnDependency in pairs(mvnDeps) do
funcs.resolveForDependency(mvnArtifact, mvnDependency)
end
@@ -302,7 +589,7 @@ function mod.resolveProperties( app )
if propKey then
local propVal = mod.getPropValThroughParentChain(app, mvnArtifact, propKey)
if propVal then
- mvnDependency.version = propVal
+ mvnDependency.version = mod.strTrim(propVal)
end
end
end
@@ -338,7 +625,7 @@ function mod.getPropValThroughParentChain( app, mvnArtifact, propKey, none )
}]
end
if not parent then
- log:write("[INFO ] Cannot resolve ${"..propKey.."}\n")
+ LOGDBG("Cannot resolve ${"..propKey.."}\n")
return nil
end
return mod.getPropValThroughParentChain(app, parent, propKey)
@@ -346,6 +633,7 @@ end
function mod.printStuffAtEnd( app )
+ if true then return end
local mvnArtifacts = {}
for _, mvnArtifact in pairs(app.mvnArtifacts) do
table.insert(mvnArtifacts, mvnArtifact)
@@ -360,139 +648,137 @@ function mod.printStuffAtEnd( app )
return false
end)
for _, mvnArtifact in ipairs(mvnArtifacts) do
- log:write("ARTIFACT "..tostring(mvnArtifact.groupId)
- .." "..tostring(mvnArtifact.artifactId)
- .." "..tostring(mvnArtifact.version).."\n")
- log:write(" PARENT ".. tostring(mvnArtifact.parentGroupId)
- .." ".. tostring(mvnArtifact.parentArtifactId)
- .." ".. tostring(mvnArtifact.parentVersion) .."\n")
+ log:write(string.format("ARTIFACT %-30s %-13s %s\n",
+ mvnArtifact.artifactId, mvnArtifact.version, mvnArtifact.groupId))
+ log:write(string.format(" PARENT %-30s %-13s %s\n",
+ mvnArtifact.parentArtifactId, mvnArtifact.parentVersion, mvnArtifact.parentGroupId))
local deps = app.mvnDepsByArtifact[mvnArtifact]
- local mvnProps = app.mvnPropsByArtifact[mvnArtifact]
+ --local mvnProps = app.mvnPropsByArtifact[mvnArtifact]
--if mvnProps then for _, mvnProp in pairs(mvnProps) do
- -- log:write(" PROP ".. mvnProp.key .."=".. mvnProp.val .."\n")
+ -- log:write(string.format(" PROP %-44s %s\n", mvnProp.key, mvnProp.val))
--end end
if deps then for _, mvnDependency in pairs(deps) do
- log:write(" DEP ".. mvnDependency.artifactId .." "..tostring(mvnDependency.version).."\n")
+ log:write(string.format(" DEP %-30s %-13s %s\n",
+ mvnDependency.artifactId, mvnDependency.version, mvnDependency.groupId))
end end
end
end
+function mod.getOrNewStringId( app, str )
+ local err -- serves as tmp and retval
+ local db, ok, emsg, rs, stringId, stmt, stmtStr
+ -- serve from memory if possible
+ if not str then err = nil; goto endFn end
+ stringId = app.cachedStringIds[str]
+ if stringId then err = stringId; goto endFn end
+ -- serve from DB if possible.
+ db = app.db
+ stmtStr = "SELECT id FROM String WHERE str = :str"
+ stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
+ stmt:reset() stmt:bind(":str", str)
+ rs = stmt:execute()
+ if rs:next() then -- already exists. re-use
+ err = rs:value(1)
+ app.cachedStringIds[str] = err
+ goto endFn
+ end
+ -- no such string yet. create.
+ stmtStr = "INSERT INTO String (str) VALUES (:str)"
+ stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
+ stmt:reset(); stmt:bind(":str", str);
+ ok, emsg = pcall(stmt.execute, stmt)
+ if not ok then
+ log:write("String: \"".. str .."\"\n")
+ error(emsg)
+ end
+ err = db:lastInsertRowid()
+ app.cachedStringIds[str] = err
+::endFn::
+ --LOGDBG("dbStr(\""..tostring(str).."\") -> "..tostring(err).."\n");
+ return err
+end
+
+
+function mod.insertMvnArtifact( app, mvnArtifact )
+ local a = mvnArtifact
+ assert(a.groupId and a.artifactId and a.version)
+ if a.parentGroupId then assert(a.parentArtifactId and a.parentVersion)
+ else assert(not a.parentArtifactId and not a.parentVersion) end
+ -- Get needed string IDs
+ local gid = mod.getOrNewStringId(app, a.groupId)
+ local aid = mod.getOrNewStringId(app, a.artifactId)
+ local ver = mod.getOrNewStringId(app, a.version)
+ local pgid = mod.getOrNewStringId(app, a.parentGroupId)
+ local paid = mod.getOrNewStringId(app, a.parentArtifactId)
+ local pver = mod.getOrNewStringId(app, a.parentVersion)
+ -- look if it already exists
+ local db = app.db
+ local stmtStr = "SELECT id from MvnArtifact"
+ .." WHERE artifactId = :aid AND version = :ver AND groupId = :gid"
+ .." AND parentArtifactId = :paid AND parentVersion = :pver AND parentGroupId = :pgid"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
+ stmt:reset()
+ stmt:bind(":aid", aid) stmt:bind(":ver", ver) stmt:bind(":gid", gid)
+ stmt:bind(":paid", paid) stmt:bind(":pver", pver) stmt:bind(":pgid", pgid)
+ local rs = stmt:execute()
+ local dbId
+ if rs:next() then
+ -- Already exists
+ dbId = assert(rs:value(1))
+ else
+ -- no such record. create it.
+ local stmtStr = "INSERT INTO MvnArtifact"
+ .."('groupId', 'artifactId', 'version', 'parentGroupId', 'parentArtifactId', 'parentVersion')"
+ .."VALUES"
+ .."(:groupId , :artifactId , :version , :parentGroupId , :parentArtifactId , :parentVersion )"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
+ stmt:reset()
+ stmt:bind(":groupId", gid)
+ stmt:bind(":artifactId", aid)
+ stmt:bind(":version", ver)
+ stmt:bind(":parentGroupId", pgid)
+ stmt:bind(":parentArtifactId", paid)
+ stmt:bind(":parentVersion", pver)
+ stmt:execute()
+ dbId = assert(db:lastInsertRowid())
+ end
+ app.cachedMvnArtifactDbIds[a] = dbId
+ local bucket = app.mvnArtifactIdsByArtif[assert(a.artifactId)]
+ if not bucket then bucket = {} app.mvnArtifactIdsByArtif[a.artifactId] = bucket end
+ table.insert(bucket, { dbId = dbId, mvnArtifact = a, })
+ return dbId
+end
+
+
function mod.storeAsSqliteFile( app )
- -- TODO could we cache our prepared queries?
- local db, stmt
- if not app.sqliteOutFile then
- log:write("[INFO ] No sqliteOutFile provided. Skip export.\n")
+ local stmt
+ if not app.sqliteFile then
+ log:write("[INFO ] No state file provided. Skip export.\n")
return
end
- -- Query to list Artifacts and their parents:
- -- SELECT GroupId.str AS 'GID', ArtifactId.str AS 'AID', Version.str AS 'Version', ParentGid.str AS 'ParentGid', ParentAid.str AS 'ParentAid', ParentVersion.str AS 'ParentVersion'
- -- FROM MvnArtifact AS A
- -- JOIN String GroupId ON GroupId.id = A.groupId
- -- JOIN String ArtifactId ON ArtifactId.id = A.artifactId
- -- JOIN String Version ON Version.id = A.version
- -- JOIN String ParentGid ON ParentGid.id = A.parentGroupId
- -- JOIN String ParentAid ON ParentAid.id = A.parentArtifactId
- -- JOIN String ParentVersion ON ParentVersion.id = A.parentVersion
- --
- -- Query to list dependencies:
- -- SELECT GroupId.str AS 'GID', ArtifactId.str AS 'AID', Version.str AS 'Version', DepGid.str AS 'Dependency GID', DepAid.str AS 'Dependnecy AID', DepVersion.str AS 'Dependency Version'
- -- FROM MvnArtifact AS A
- -- JOIN MvnDependency AS Dep ON Dep.mvnArtifactId = A.id
- -- JOIN MvnArtifact AS D ON Dep.needsMvnArtifactId = D.id
- -- JOIN String GroupId ON GroupId.id = A.groupId
- -- JOIN String ArtifactId ON ArtifactId.id = A.artifactId
- -- JOIN String Version ON Version.id = A.version
- -- JOIN String DepGid ON DepGid.id = D.groupId
- -- JOIN String DepAid ON DepAid.id = D.artifactId
- -- JOIN String DepVersion ON DepVersion.id = D.version
- --
- db = newSqlite{
- database = app.sqliteOutFile,
- }
- db:enhancePerf()
- db:prepare("CREATE TABLE String ("
- .." id INTEGER PRIMARY KEY,"
- .." str TEXT UNIQUE)"
- ):execute()
- db:prepare("CREATE TABLE MvnArtifact ("
- .." id INTEGER PRIMARY KEY,"
- .." groupId INT,"
- .." artifactId INT,"
- .." version INT,"
- .." parentGroupId INT,"
- .." parentArtifactId INT,"
- .." parentVersion INT)"
- ):execute()
- db:prepare("CREATE TABLE MvnDependency ("
- .." id INTEGER PRIMARY KEY,"
- .." mvnArtifactId INT,"
- .." needsMvnArtifactId INT)"
- ):execute()
- --db:prepare("CREATE TABLE MvnProperty ("
- -- .." id INTEGER PRIMARY KEY,"
- -- .." keyStringId INT,"
- -- .." valStringId INT)"
- --):execute()
- local mvnArtifactIds = {}
- local mvnArtifactIdsByArtif = {}
- local strings = {}
- local getStringId = function( str ) -- create/reUse strings on-demand
- if not str then return nil end
- local stringId = strings[str]
- if not stringId then
- local stmt = db:prepare("INSERT INTO String (str)VALUES(:str)")
- stmt:reset()
- stmt:bind(":str", str)
- stmt:execute()
- stringId = db:lastInsertRowid()
- strings[str] = stringId
- end
- return stringId
- end
- local stmtInsMvnArtifact = db:prepare("INSERT INTO MvnArtifact"
- .."('groupId', 'artifactId', 'version', 'parentGroupId', 'parentArtifactId', 'parentVersion')"
- .."VALUES"
- .."(:groupId , :artifactId , :version , :parentGroupId , :parentArtifactId , :parentVersion )")
- local insertMvnArtifact = function(a)
- assert(a.groupId and a.artifactId and a.version)
- if a.parentGroupId then assert(a.parentArtifactId and a.parentVersion)
- else assert(not a.parentArtifactId and not a.parentVersion) end
- stmtInsMvnArtifact:reset()
- stmtInsMvnArtifact:bind(":groupId", getStringId(a.groupId))
- stmtInsMvnArtifact:bind(":artifactId", getStringId(a.artifactId))
- stmtInsMvnArtifact:bind(":version", getStringId(a.version))
- stmtInsMvnArtifact:bind(":parentGroupId", getStringId(a.parentGroupId))
- stmtInsMvnArtifact:bind(":parentArtifactId", getStringId(a.parentArtifactId))
- stmtInsMvnArtifact:bind(":parentVersion", getStringId(a.parentVersion))
- stmtInsMvnArtifact:execute()
- local dbId = db:lastInsertRowid()
- mvnArtifactIds[a] = dbId -- TODO MUST be byString
- local bucket = mvnArtifactIdsByArtif[assert(a.artifactId)]
- if not bucket then bucket = {} mvnArtifactIdsByArtif[a.artifactId] = bucket end
- table.insert(bucket, { dbId = dbId, mvnArtifact = a, })
- return dbId
- end
+ local db = app.db
-- Store artifacts
for _, mvnArtifact in pairs(app.mvnArtifacts) do
- insertMvnArtifact(mvnArtifact)
+ mod.insertMvnArtifact(app, mvnArtifact)
local mvnDeps = app.mvnDepsByArtifact[mvnArtifact] or {}
-- dependencies are nothing else than artifacts
for _, mvnDep in pairs(mvnDeps) do
+ -- TODO?!?
end
end
-- Store dependencies
- local stmt = db:prepare("INSERT INTO MvnDependency"
- .."('mvnArtifactId', 'needsMvnArtifactId')"
- .."VALUES"
- .."(:mvnArtifactId , :needsMvnArtifactId )")
for _, mvnArtifact in pairs(app.mvnArtifacts) do
local mvnDeps = app.mvnDepsByArtifact[mvnArtifact]
for _, mvnDep in pairs(mvnDeps or {}) do
if not mvnDep.version then mvnDep.version = "TODO_5bbc0e87011e24d845136c5406302616" end
assert(mvnDep.version, mvnDep.artifactId)
assert(mvnDep.groupId and mvnDep.artifactId and mvnDep.version)
- local bucket = mvnArtifactIdsByArtif[mvnDep.artifactId]
+ local bucket = app.mvnArtifactIdsByArtif[mvnDep.artifactId]
local depId = nil
for _,a in pairs(bucket or {}) do
if mvnDep.groupId == a.mvnArtifact.groupId
@@ -502,135 +788,375 @@ function mod.storeAsSqliteFile( app )
end
end
if not depId then -- Artifact not stored yet. Do now.
- depId = insertMvnArtifact({
+ depId = mod.insertMvnArtifact(app, {
groupId = mvnDep.groupId,
artifactId = mvnDep.artifactId,
version = mvnDep.version,
})
end
+ -- maybe already in db?
+ local stmtStr = "SELECT id FROM MvnDependency"
+ .." WHERE mvnArtifactId = :mvnArtifactId AND needsMvnArtifactId = :needsMvnArtifactId"
+ local stmt = app.stmtCache[stmtStr];
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
stmt:reset()
- stmt:bind(":mvnArtifactId", assert(mvnArtifactIds[mvnArtifact]))
- stmt:bind(":needsMvnArtifactId", assert(depId, mvnDep.artifactId))
- stmt:execute()
+ stmt:bind(":mvnArtifactId", app.cachedMvnArtifactDbIds[mvnArtifact])
+ stmt:bind(":needsMvnArtifactId", depId)
+ local rs = stmt:execute()
+ if not rs:next() then -- not yet in db. create.
+ local stmtStr = "INSERT INTO MvnDependency"
+ .."('mvnArtifactId', 'needsMvnArtifactId')"
+ .."VALUES"
+ .."(:mvnArtifactId , :needsMvnArtifactId )"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr); app.stmtCache[stmtStr] = stmt end
+ stmt:reset()
+ stmt:bind(":mvnArtifactId", assert(app.cachedMvnArtifactDbIds[mvnArtifact]))
+ stmt:bind(":needsMvnArtifactId", assert(depId, mvnDep.artifactId))
+ stmt:execute()
+ end
end
end
- db:close()
end
-function mod.run( app )
- assert(not app.mvnArtifacts) app.mvnArtifacts = {}
- assert(not app.mvnPropsByArtifact) app.mvnPropsByArtifact = {}
- assert(not app.mvnDepsByArtifact) app.mvnDepsByArtifact = {}
- assert(not app.mvnMngdDepsByArtifact) app.mvnMngdDepsByArtifact = {}
- local pomSrc = mod.newPomUrlSrc(app)
+function mod.dbOpen( app )
+ -- Opens the sqlite state file 'app.sqliteFile', starts one big transaction
+ -- and makes sure the schema exists. Counterpart is mod.dbCommit().
+ assert(not app.db)
+ app.db = newSqlite{
+ database = app.sqliteFile,
+ }
+ local db = app.db
+ -- Everything runs in a single transaction until mod.dbCommit() ends it.
+ db:prepare("BEGIN TRANSACTION"):execute()
+ db:enhancePerf()
+ -- 'String' interns every string once; other tables store only its id.
+ db:prepare("CREATE TABLE IF NOT EXISTS String ("
+ .." id INTEGER PRIMARY KEY,"
+ .." str TEXT UNIQUE)"
+ ):execute()
+ db:prepare("CREATE TABLE IF NOT EXISTS MvnArtifact ("
+ .." id INTEGER PRIMARY KEY,"
+ .." groupId INT,"
+ .." artifactId INT,"
+ .." version INT,"
+ .." parentGroupId INT,"
+ .." parentArtifactId INT,"
+ .." parentVersion INT)"
+ ):execute()
+ db:prepare("CREATE TABLE IF NOT EXISTS MvnDependency ("
+ .." id INTEGER PRIMARY KEY,"
+ .." mvnArtifactId INT,"
+ .." needsMvnArtifactId INT)"
+ ):execute()
+ --db:prepare("CREATE TABLE MvnProperty ("
+ -- .." id INTEGER PRIMARY KEY,"
+ -- .." keyStringId INT,"
+ -- .." valStringId INT)"
+ --):execute()
+end
+
+
+function mod.dbCommit( app )
+ -- Commits the transaction opened by mod.dbOpen() and releases the handle.
+ app.db:prepare("END TRANSACTION"):execute()
+ app.db:close()
+ app.db = nil
+end
+
+
+function mod.exportArtifacts(app)
+ -- Writes all known artifacts (GroupId/ArtifactId/Version) to 'out' in the
+ -- "h;/c;/r;/t;" report format, sorted by artifactId then version.
+ -- NOTE(review): opens the DB but never calls mod.dbCommit(); presumably
+ -- fine for a read-only export — confirm.
+ mod.dbOpen(app)
+ local db = app.db
+ local stmtStr = ""
+ .." SELECT"
+ .." GroupId.str AS 'gid',"
+ .." ArtifactId.str AS 'aid',"
+ .." Version.str AS 'ver'"
+ .." FROM MvnArtifact AS A"
+ .." JOIN String GroupId ON GroupId.id = A.groupId"
+ .." JOIN String ArtifactId ON ArtifactId.id = A.artifactId"
+ .." JOIN String Version ON Version.id = A.version"
+ .." ORDER BY ArtifactId.str, Version.str"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
+ local rs = stmt:execute()
+ out:write("h;Title;List of Artifacts\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version\n")
+ while rs:next() do
+ out:write("r;".. rs:value(1) ..";".. rs:value(2) ..";".. rs:value(3) .."\n")
+ end
+ out:write("t;status;OK\n")
+end
+
+
+function mod.exportParents(app)
+ -- Writes all artifact -> parent relations to 'out' in the "h;/c;/r;/t;"
+ -- report format. Artifacts without a parent are dropped by the INNER JOINs.
+ mod.dbOpen(app)
+ local db = app.db
+ local stmtStr = ""
+ .." SELECT"
+ .." GroupId.str AS 'GID',"
+ .." ArtifactId.str AS 'AID',"
+ .." Version.str AS 'Version',"
+ .." ParentGid.str AS 'ParentGid',"
+ .." ParentAid.str AS 'ParentAid',"
+ .." ParentVersion.str AS 'ParentVersion'"
+ .." FROM MvnArtifact AS A"
+ .." JOIN String GroupId ON GroupId.id = A.groupId"
+ .." JOIN String ArtifactId ON ArtifactId.id = A.artifactId"
+ .." JOIN String Version ON Version.id = A.version"
+ .." JOIN String ParentGid ON ParentGid.id = A.parentGroupId"
+ .." JOIN String ParentAid ON ParentAid.id = A.parentArtifactId"
+ .." JOIN String ParentVersion ON ParentVersion.id = A.parentVersion"
+ .." ORDER BY ParentAid.str, ParentVersion.str, ArtifactId.str, Version.str"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
+ local rs = stmt:execute()
+ out:write("h;Title;Parent relations\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
+ while rs:next() do
+ out:write("r;".. rs:value(1) ..";".. rs:value(2) ..";".. rs:value(3)
+ ..";".. rs:value(4) ..";".. rs:value(5) ..";".. rs:value(6) .."\n")
+ end
+ out:write("t;status;OK\n")
+end
+
+
+function mod.exportParentsLatest(app)
+ -- Like mod.exportParents, but keeps only the newest version of each
+ -- artifact (keyed by artifactId+groupId, compared via mod.compareVersion).
+ mod.dbOpen(app)
+ local db = app.db
+ local stmtStr = ""
+ .." SELECT"
+ .." GroupId.str AS 'GID',"
+ .." ArtifactId.str AS 'AID',"
+ .." Version.str AS 'Version',"
+ .." ParentGid.str AS 'ParentGid',"
+ .." ParentAid.str AS 'ParentAid',"
+ .." ParentVersion.str AS 'ParentVersion'"
+ .." FROM MvnArtifact AS A"
+ .." JOIN String GroupId ON GroupId.id = A.groupId"
+ .." JOIN String ArtifactId ON ArtifactId.id = A.artifactId"
+ .." JOIN String Version ON Version.id = A.version"
+ .." JOIN String ParentGid ON ParentGid.id = A.parentGroupId"
+ .." JOIN String ParentAid ON ParentAid.id = A.parentArtifactId"
+ .." JOIN String ParentVersion ON ParentVersion.id = A.parentVersion"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
+ local rs = stmt:execute()
+ out:write("h;Title;Parent relations (latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
+ -- Need to filter out the older artifacts.
+ local all = {}
+ while rs:next() do
+ local gid, aid, ver = rs:value(1), rs:value(2), rs:value(3)
+ local key = aid .."\t".. gid;
+ local entry = all[key]
+ -- -1 forces the "take this row" branch when no entry exists yet.
+ local diff = (not entry)and -1 or mod.compareVersion(entry.ver, ver)
+ if diff > 0 then -- existing is newer. Keep it and ignore newer one.
+ goto nextRecord
+ else -- Either no entry yet or found a newer one.
+ local entry = { gid=false, aid=false, ver=false, pgid=false, paid=false, pver=false }
+ entry.gid = gid
+ entry.aid = aid
+ entry.ver = ver
+ entry.pgid = rs:value(4)
+ entry.paid = rs:value(5)
+ entry.pver = rs:value(6)
+ all[key] = entry
+ end
+::nextRecord::
+ end
+ -- Print
+ for _, entry in pairs(all) do
+ out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver
+ ..";".. entry.pgid ..";".. entry.paid ..";".. entry.pver .."\n")
+ end
+ out:write("t;status;OK\n")
+end
+
+
+function mod.exportDeps(app)
+ -- Writes every known (artifact -> dependency) pair to 'out' in the
+ -- "h;/c;/r;/t;" report format.
+ mod.dbOpen(app)
+ local db = app.db
+ local stmtStr = ""
+ .." SELECT"
+ .." GroupId.str AS 'GID',"
+ .." ArtifactId.str AS 'AID',"
+ .." Version.str AS 'Version',"
+ .." DepGid.str AS 'Dependency GID',"
+ .." DepAid.str AS 'Dependency AID',"
+ .." DepVersion.str AS 'Dependency Version'"
+ .." FROM MvnArtifact AS A"
+ .." JOIN MvnDependency AS Dep ON Dep.mvnArtifactId = A.id"
+ .." JOIN MvnArtifact AS D ON Dep.needsMvnArtifactId = D.id"
+ .." JOIN String GroupId ON GroupId.id = A.groupId"
+ .." JOIN String ArtifactId ON ArtifactId.id = A.artifactId"
+ .." JOIN String Version ON Version.id = A.version"
+ .." JOIN String DepGid ON DepGid.id = D.groupId"
+ .." JOIN String DepAid ON DepAid.id = D.artifactId"
+ .." JOIN String DepVersion ON DepVersion.id = D.version"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
+ local rs = stmt:execute()
+ out:write("h;Title;Dependencies (all known)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
+ while rs:next() do
+ out:write("r;".. rs:value(1) ..";".. rs:value(2) ..";".. rs:value(3)
+ ..";".. rs:value(4) ..";".. rs:value(5) ..";".. rs:value(6) .."\n")
+ end
+ out:write("t;status;OK\n")
+end
+
+
+function mod.exportDepsLatest(app)
+ -- Like mod.exportDeps, but restricted to the newest version of each
+ -- artifact (per artifactId+groupId, compared via mod.compareVersion).
+ mod.dbOpen(app)
+ local db = app.db
+ local stmtStr = ""
+ .." SELECT"
+ .." GroupId.str AS 'GID',"
+ .." ArtifactId.str AS 'AID',"
+ .." Version.str AS 'Version',"
+ .." DepGid.str AS 'Dependency GID',"
+ .." DepAid.str AS 'Dependency AID',"
+ .." DepVersion.str AS 'Dependency Version'"
+ .." FROM MvnArtifact AS A"
+ .." JOIN MvnDependency AS Dep ON Dep.mvnArtifactId = A.id"
+ .." JOIN MvnArtifact AS D ON Dep.needsMvnArtifactId = D.id"
+ .." JOIN String GroupId ON GroupId.id = A.groupId"
+ .." JOIN String ArtifactId ON ArtifactId.id = A.artifactId"
+ .." JOIN String Version ON Version.id = A.version"
+ .." JOIN String DepGid ON DepGid.id = D.groupId"
+ .." JOIN String DepAid ON DepAid.id = D.artifactId"
+ .." JOIN String DepVersion ON DepVersion.id = D.version"
+ local stmt = app.stmtCache[stmtStr]
+ if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
+ local rs = stmt:execute()
+ out:write("h;Title;Dependencies (of latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
+ -- First pass: buffer all rows and find the newest version per artifact.
+ -- (The previous goto-based filter keyed by artifact only, so dependency
+ -- rows of the SAME latest version overwrote each other and only one
+ -- dependency per artifact survived.)
+ local rows = {}
+ local latestVer = {}
+ while rs:next() do
+ local row = { gid = rs:value(1), aid = rs:value(2), ver = rs:value(3),
+ dgid = rs:value(4), daid = rs:value(5), dver = rs:value(6), }
+ table.insert(rows, row)
+ local key = row.aid .."\t".. row.gid
+ local ver = latestVer[key]
+ if (not ver) or mod.compareVersion(ver, row.ver) < 0 then latestVer[key] = row.ver end
+ end
+ -- Second pass: print every dependency row of the latest version only.
+ for _, row in ipairs(rows) do
+ if latestVer[row.aid .."\t".. row.gid] == row.ver then
+ out:write("r;".. row.gid ..";".. row.aid ..";".. row.ver
+ ..";".. row.dgid ..";".. row.daid ..";".. row.dver .."\n")
+ end
+ end
+ out:write("t;status;OK\n")
+end
+
+
+function mod.scan( app )
+ mod.dbOpen(app)
+ table.insert(app.taskQueue, function()mod.fetchAnotherMvnArtifact(app)end)
while true do
- local pomUrl = pomSrc:nextPomUrl()
- if not pomUrl then break end
- local proto = pomUrl:match("^(https?)://")
- local isTLS = (proto:upper() == "HTTPS")
- local host = pomUrl:match("^https?://([^:/]+)[:/]")
- local port = pomUrl:match("^https?://[^:/]+:(%d+)[^%d]")
- local url = pomUrl:match("^https?://[^/]+(.*)$")
- if port == 443 then isTLS = true end
- if not port then port = (isTLS and 443 or 80) end
- log:write("> GET ".. proto .."://".. host ..":".. port .. url .."\n")
- local req = objectSeal{
- app = app,
- base = false,
- pomParser = false,
- }
- req.base = app.http:request{
- cls = req,
- host = assert(host), port = assert(port),
- method = "GET", url = url,
- --hdrs = ,
- useTLS = isTLS,
- onRspHdr = mod.onGetPomRspHdr,
- onRspChunk = mod.onGetPomRspChunk,
- onRspEnd = mod.onGetPomRspEnd,
- }
- req.base:closeSnk()
- end
- log:write("[INFO ] No more pom URLs\n")
+ local task = table.remove(app.taskQueue, 1)
+ if not task then
+ if #app.currentUrlsToFetch > 0 or #app.nextUrlsToFetch > 0 then
+ log:write("[WARN ] Huh2?!? ".. #app.currentUrlsToFetch .." ".. #app.nextUrlsToFetch
+ ..". Why are there still entries? Keep looping.\n")
+ table.insert(app.taskQueue, function()mod.fetchAnotherMvnArtifact(app)end)
+ goto nextTask
+ end
+ if #app.taskQueue > 0 then -- TODO fix this würgaround
+ goto nextTask
+ else
+ break
+ end
+ end
+ task()
+ ::nextTask::
+ end
mod.resolveDependencyVersionsFromDepsMgmnt(app)
mod.resolveProperties(app)
+ if #app.currentUrlsToFetch > 0 or #app.nextUrlsToFetch > 0 then
+ log:write("[WARN ] ".. #app.currentUrlsToFetch .." ".. #app.nextUrlsToFetch
+ ..". Why are there entries again?!?.\n")
+ error("WTF?!?")
+ return
+ end
+ mod.printStuffAtEnd(app)
mod.storeAsSqliteFile(app)
- --mod.printStuffAtEnd(app)
+ mod.dbCommit(app)
end
-function mod.newSocketMgr()
- local hosts = {}
- local openSock = function( t, opts )
- for k, v in pairs(opts) do
- if false then
- elseif k=='host' or k=='port' then
- elseif k=='useTLS' then
- if v then error('TLS not impl') end
- else
- error('Unknown option: '..tostring(k))
- end
- end
- local inaddr = inaddrOfHostname(opts.host)
- local af
- if inaddr:find('^%d+.%d+.%d+.%d+$') then af = AF_INET else af = AF_INET6 end
- local sock = socket(af, SOCK_STREAM, IPPROTO_TCP)
- sock:connect(inaddr, opts.port)
- log:write("opts.useTLS "..tostring(opts.useTLS).." (Override to TRUE ...)\n")
- opts.useTLS = true -- TODO why the heck is this needed? (I guess scriptlee bug?)
- if opts.useTLS then
- local sockUnderTls = sock
- sock = newTlsClient{
- cls = assert(sockUnderTls),
- peerHostname = assert(opts.host),
- onVerify = function( tlsIssues, sockUnderTls )
- if tlsIssues.CERT_NOT_TRUSTED then
- warn("TLS ignore CERT_NOT_TRUSTED");
- tlsIssues.CERT_NOT_TRUSTED = false
- end
- end,
- send = function( buf, sockUnderTls )
- local ret = sockUnderTls:write(buf)
- sockUnderTls:flush() -- TODO Why is this flush needed?
- return ret
- end,
- recv = function( sockUnderTls ) return sockUnderTls:read() end,
- flush = function( sockUnderTls ) sockUnderTls:flush() end,
- closeSnk = function( sockUnderTls ) sockUnderTls:closeSnk() end,
- }
- assert(not getmetatable(sock).release)
- getmetatable(sock).release = function( t ) sockUnderTls:release() end;
- end
- return {
- _sock = sock,
- write = function(t, ...) return sock:write(...)end,
- read = function(t, ...) return sock:read(...)end,
- flush = function(t, ...) return sock:flush(...)end,
- }
- end
- return{
- openSock = openSock,
- releaseSock = function(t, sockWrapr) t:closeSock(sockWrapr) end,
- closeSock = function(t, sockWrapr) sockWrapr._sock:release() end,
- }
+function mod.run( app )
+ -- Dispatches to the operation chosen on the command line (see parseArgs).
+ -- NOTE(review): an unknown operation only logs "InternalError" and returns
+ -- normally (exit code 0) — confirm that is intended.
+ if false then
+ elseif app.operation == "export-artifacts" then
+ mod.exportArtifacts(app)
+ elseif app.operation == "export-parents" then
+ mod.exportParents(app)
+ elseif app.operation == "export-parents-latest" then
+ mod.exportParentsLatest(app)
+ elseif app.operation == "export-deps" then
+ mod.exportDeps(app)
+ elseif app.operation == "export-deps-latest" then
+ mod.exportDepsLatest(app)
+ elseif app.operation == "yolo" then
+ assert(app.isYolo)
+ mod.scan(app)
+ else
+ log:write("InternalError '"..tostring(app.operation).."'\n")
+ end
+end
function mod.main()
local app = objectSeal{
- http = newHttpClient{
- socketMgr = assert(mod.newSocketMgr()),
+ isYolo = false,
+ operation = false,
+ http = newHttpClient{},
+ mvnArtifacts = {},
+ mvnArtifactsAlreadyParsed = {}, -- if we already tried to fetch it (no matter what outcome)
+ mvnArtifactsNotFound = {},
+ mvnArtifactIdsByArtif = {},
+ mvnPropsByArtifact = {},
+ mvnDepsByArtifact = {},
+ mvnMngdDepsByArtifact = {},
+ taskQueue = {},
+ db = false,
+ sqliteFile = false,
+ stmtCache = {},
+ cachedMvnArtifactDbIds = {},
+ cachedStringIds = {},
+ pomSources = {
+ objectSeal{ type = "local-file-cache", repoDir = "C:/Users/fankhauseand/.m2/repository", },
+ objectSeal{ type = "webserver", baseUrl = "http://127.0.0.1:8081/tmp/artifactory", },
+ --objectSeal{ type = "webserver", baseUrl = "https://artifactory.pnet.ch/artifactory", },
+ },
+ -- Set of URLs that are currently processed
+ currentUrlsToFetch = {},
+ -- Set of URLs that need to be fetchet later (eg bcause dependency not fetched yet)
+ nextUrlsToFetch = {
+ -- TODO place URLs here (bcause there's no API to do this yet)
+ --{ artifactId = "trin-web", version = "02.01.07.00", groupId = "ch.post.it.paisa.trin" },
},
- mvnArtifacts = false,
- mvnPropsByArtifact = false,
- mvnDepsByArtifact = false,
- mvnMngdDepsByArtifact = false,
- sqliteOutFile = false,
}
if mod.parseArgs(app) ~= 0 then os.exit(1) end
mod.run(app)
end
-startOrExecute(nil, mod.main)
+--startOrExecute(nil, mod.main)
+startOrExecute(mod.main)
diff --git a/src/main/lua/misc/Asn1Digger.lua b/src/main/lua/misc/Asn1Digger.lua
new file mode 100644
index 0000000..16911e5
--- /dev/null
+++ b/src/main/lua/misc/Asn1Digger.lua
@@ -0,0 +1,148 @@
+
+
+local mod = {}
+local log, inn, out = io.stderr, io.stdin, io.stdout
+
+
+function mod.printHelp()
+ -- Writes the usage/help text for this tool to stdout.
+ io.stdout:write(" \n"
+ .." Print ASN.1 from stdin to a textual representation on stdout.\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." -c <num>\n"
+ .." Number of columns to use for hex-dumps. Defaults to 16.\n"
+ .." \n"
+ .." --gpg\n"
+ .." Assume GPG and print additional info for it.\n"
+ .." \n")
+end
+
+
+function mod.parseArgs( app )
+ -- Parses CLI args into 'app'. Returns 0 on success, -1 on invalid usage
+ -- (an "EINVAL" line is written to stderr in that case).
+ app.hexCols = 16
+ local i = 0 while true do i = i + 1
+ local arg = _ENV.arg[i]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; break
+ elseif arg == "-c" then
+ i = i + 1
+ app.hexCols = _ENV.arg[i]
+ -- Digits only, max 8 chars (rejects missing values and nonsense input).
+ if not app.hexCols or not app.hexCols:find("^[0-9]+$") or app.hexCols:len() > 8 then
+ log:write("EINVAL: -c ".. (app.hexCols or "needs a value") .."\n")
+ return -1
+ end
+ app.hexCols = tonumber(app.hexCols)
+ elseif arg == "--gpg" then
+ app.assumeGPG = true
+ else
+ log:write("EINVAL: ".. arg .."\n")
+ return -1
+ end
+ end
+ return 0
+end
+
+
+function mod.state_type( app )
+ -- State 1/3: read the ASN.1 identifier (tag) octet. EOF here ends the run.
+ -- NOTE(review): multi-octet (high tag number) identifiers are not decoded;
+ -- the single octet is taken as-is — confirm this is acceptable.
+ local ty = app.src:read(1)
+ if not ty then -- EOF
+ app.isInnEof = true
+ return
+ end
+ ty = ty:byte(1)
+ app.type = ty
+ app.funcToCall = mod.state_length
+end
+
+
+function mod.state_length( app )
+ -- State 2/3: read the ASN.1 length octets (short AND long form, DER).
+ -- Previously only the short form was handled: a long-form first octet like
+ -- 0x82 was taken as a literal length of 130 instead of "2 length octets
+ -- follow", corrupting the parse for any value longer than 127 bytes.
+ local len = app.src:read(1)
+ assert(len, len)
+ len = len:byte(1)
+ if len == 0x80 then
+ error("BER indefinite length not supported")
+ elseif len > 0x80 then -- long form: low 7 bits = number of length octets
+ local numOctets = len & 0x7F
+ assert(numOctets <= 8, "length encoding too large")
+ local octets = app.src:read(numOctets)
+ assert(octets and octets:len() == numOctets, "EOF inside length")
+ len = 0
+ for k = 1, numOctets do len = (len << 8) | octets:byte(k) end
+ end
+ app.len = len
+ app.funcToCall = mod.state_value
+end
+
+
+function mod.state_value( app )
+ -- State 3/3: read the value octets and print type, length and a
+ -- hex/ASCII dump to app.dst.
+ assert(type(app.len) == "number", app.len)
+ local val = app.src:read(app.len)
+ assert(val, val)
+ local typeName
+ if app.type == 0x00 then typeName = "EndOfContent" end
+ if app.type == 0x02 then typeName = "integer" end
+ if app.type == 0x04 then typeName = "octet string" end
+ if app.type == 0x0C then typeName = "utf8 string" end
+ if not typeName then
+ -- Tag class lives in the two most significant bits (X.690):
+ -- 00 universal, 01 application, 10 context-specific, 11 private.
+ -- (The previous bit tests labeled Application and Context-specific
+ -- swapped.)
+ local tagClass = false
+ or ((app.type & 0xC0) == 0x00) and "Universal"
+ or ((app.type & 0xC0) == 0x40) and "Application"
+ or ((app.type & 0xC0) == 0x80) and "Context-specific"
+ or "Private"
+ local primOrConstr = ((app.type & 0x20) == 0)and"primitive"or"constructed"
+ local isLongType = (app.type & 0x1F == 0x1F)
+ local fmt = (isLongType) and("%s, %s, %s") or("subType 0x%02X, %s, %s")
+ typeName = string.format(fmt,
+ isLongType and("LongType") or(app.type & 0x1F),
+ tagClass,
+ primOrConstr)
+ end
+ app.dst:write(string.format("ASN.1 type 0x%02X, len %d (%s), value:", app.type, app.len, typeName))
+ if app.assumeGPG then
+ if app.type == 0x95 then
+ app.dst:write(string.format("\nGPG secret key packet, version %d", val:byte(2)))
+ elseif app.type == 0x99 and app.len == 1 and val:byte(1) == 0x0D then
+ app.dst:write("\nGPG certificate")
+ end
+ end
+ local line = ""
+ local i = 0 while i < val:len() do i = i + 1
+ if (i-1) % app.hexCols == 0 then app.dst:write(string.format("%s\n %08X:", line, i-1)); line = " " end
+ local char = val:byte(i)
+ app.dst:write(string.format(" %02X", char))
+ local isPrintable = false
+ or (char >= 0x20 and char <= 0x7E)
+ line = line .. (isPrintable and string.char(char) or ".")
+ end
+ if line:len() > 0 then
+ i = i % app.hexCols
+ -- Pad only incomplete rows. (Previously a final FULL row got a whole
+ -- extra row's worth of filler because i %% cols == 0 restarted at 0.)
+ if i > 0 then while i < app.hexCols do app.dst:write(" "); i = i + 1 end end
+ app.dst:write(line)
+ end
+ app.dst:write("\n")
+ app.funcToCall = mod.state_type
+end
+
+
+function mod.run( app )
+ -- Drives the state machine: type -> length -> value -> type ... until EOF.
+ app.src = io.stdin
+ app.dst = io.stdout
+ app.funcToCall = mod.state_type
+ while not app.isInnEof do
+ app.funcToCall(app)
+ end
+end
+
+
+function mod.main()
+ -- Entry point: build the app state, parse args, then run (or print help).
+ local app = {
+ isHelp = false,
+ assumeGPG = false,
+ src = false,
+ dst = false,
+ isInnEof = false,
+ funcToCall = false,
+ type = false,
+ len = false,
+ value = false,
+ }
+ if mod.parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then mod.printHelp() return end
+ mod.run(app)
+end
+
+
+mod.main()
diff --git a/src/main/nodejs/ResClone/resclone.js b/src/main/nodejs/ResClone/resclone.js
index 2998aa5..1f31803 100644
--- a/src/main/nodejs/ResClone/resclone.js
+++ b/src/main/nodejs/ResClone/resclone.js
@@ -1,12 +1,14 @@
;(function(){ "use strict";
-const Agent = require("http").Agent;
+const HttpAgent = require("http").Agent;
+const HttpsAgent = require("https").Agent;
const N = null;
const URL = require("url").URL;
const assert = require("assert");
const fs = require("fs");
const http = require("http");
+const https = require("https");
const isArray = Array.isArray;
const isBuffer = Buffer.isBuffer;
const isInteger = Number.isInteger;
@@ -96,10 +98,15 @@ function parseArgs( app, argv ){
function run( app ){
assert(app.httpAgent == null);
- app.httpAgent = new Agent({
+ app.httpAgent = new HttpAgent({
keepAlive: true,
maxSockets: 4, // DON'T kill houston
});
+ assert(app.httpsAgent == null);
+ app.httpsAgent = new HttpsAgent({
+ keepAlive: true,
+ maxSockets: 4, // DON'T kill houston
+ })
const rootNode = newNode();
rootNode.url = app.url;
fetchCollection(app, rootNode);
@@ -128,7 +135,9 @@ function fetchCollection( app, node ){
bodyChunks: [],
});
log.write("> GET "+ node.url +"\n");
- collection.base = http.get(node.url, {agent:app.httpAgent}, onCollectionResponseHdr.bind(N, collection));
+ var client = (node.url.startsWith("https://") ? https : http);
+ var agent = (node.url.startsWith("https://") ? app.httpsAgent : http.agent);
+ collection.base = client.get(node.url, {agent:agent}, onCollectionResponseHdr.bind(N, collection));
collection.base.on("error", function( ex ){ throw ex; });
collection.base.end();
}
@@ -136,7 +145,7 @@ function fetchCollection( app, node ){
function onCollectionResponseHdr( collection, rsp ){
if( rsp.statusCode != 200 ){
- log.write("< HTTP "+ rsp.statusCode +"\n");
+ log.write("< HTTP "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
Object.keys(rsp.headers).forEach(function( key ){
log.write("< "+ key +": "+ rsp.headers[key] +"\n");
});
@@ -444,6 +453,7 @@ function main( argv ){
archivePath: null,
archiveFd: null,
httpAgent: null,
+ httpsAgent: null,
numPendingCollectionRequests: 0,
numPendingTarWriteRequests: 0,
});
diff --git a/src/main/patch/houston/default-20230203.patch b/src/main/patch/houston/default-20230203.patch
new file mode 100644
index 0000000..c1deeca
--- /dev/null
+++ b/src/main/patch/houston/default-20230203.patch
@@ -0,0 +1,52 @@
+
+ My custom patch ready-to-apply to have a "usable" houston.
+
+ Contains:
+ - Simplelogger
+ - Listen localhost only
+ - Queue-Retry every 5 seconds.
+
+diff --git a/houston-process/pom.xml b/houston-process/pom.xml
+index fff9c178..960c0098 100644
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -20,6 +20,26 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.houston</groupId>
+diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+index ee7d8b02..b28ae8d6 100644
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -256,7 +256,7 @@ public class Deployer {
+ qc.add(new QueueConfiguration().withPattern("brox-from-vehicles-.*").withRetryIntervals(10, 20, 30, 60, 120)
+ .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(10_000));
+ // All other queues (typically to backend services) with a slow-down pattern after failed delivery
+- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(10, 20, 30, 60, 120));
++ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5));/*TODO revert*/
+
+ RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
+ .address(Address.redisquesAddress())
diff --git a/src/main/patch/houston/default-20230214.patch b/src/main/patch/houston/default-20230214.patch
new file mode 100644
index 0000000..3f8fa16
--- /dev/null
+++ b/src/main/patch/houston/default-20230214.patch
@@ -0,0 +1,56 @@
+
+ My custom patch ready-to-apply to have a "usable" houston.
+
+ Contains:
+ - Simplelogger
+ - Listen localhost only
+ - Queue-Retry every 5 seconds.
+
+ Patch based on "houston-02.01.12.00" aka
+ "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
+
+diff --git a/houston-process/pom.xml b/houston-process/pom.xml
+index fff9c178..960c0098 100644
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -20,6 +20,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.houston</groupId>
+diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+index ee7d8b02..b28ae8d6 100644
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -309,7 +309,7 @@ public class Deployer {
+ qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
+ .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
+ // All other queues (typically to backend services) with a slow-down pattern after failed delivery
+- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
++ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
+
+ RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
+ .address(Address.redisquesAddress())
+
diff --git a/src/main/patch/houston/default-20230331.patch b/src/main/patch/houston/default-20230331.patch
new file mode 100644
index 0000000..64d3628
--- /dev/null
+++ b/src/main/patch/houston/default-20230331.patch
@@ -0,0 +1,56 @@
+
+ My custom patch ready-to-apply to have a "usable" houston.
+
+ Contains:
+ - Simplelogger
+ - Listen localhost only
+ - Queue-Retry every 5 seconds.
+
+ Patch based on "houston-02.01.12.00" aka
+ "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
+
+diff --git a/houston-process/pom.xml b/houston-process/pom.xml
+index fff9c178..960c0098 100644
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -20,6 +20,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.houston</groupId>
+diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+index ee7d8b02..b28ae8d6 100644
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -309,7 +309,7 @@ public class Deployer {
+ qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
+ .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
+ // All other queues (typically to backend services) with a slow-down pattern after failed delivery
+- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
++ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
+
+ RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
+ .address(Address.redisquesAddress())
+
diff --git a/src/main/patch/houston/default-20230503.patch b/src/main/patch/houston/default-20230503.patch
new file mode 100644
index 0000000..64d3628
--- /dev/null
+++ b/src/main/patch/houston/default-20230503.patch
@@ -0,0 +1,56 @@
+
+ My custom patch ready-to-apply to have a "usable" houston.
+
+ Contains:
+ - Simplelogger
+ - Listen localhost only
+ - Queue-Retry every 5 seconds.
+
+ Patch based on "houston-02.01.12.00" aka
+ "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
+
+diff --git a/houston-process/pom.xml b/houston-process/pom.xml
+index fff9c178..960c0098 100644
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -20,6 +20,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.houston</groupId>
+diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+index ee7d8b02..b28ae8d6 100644
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -309,7 +309,7 @@ public class Deployer {
+ qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
+ .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
+ // All other queues (typically to backend services) with a slow-down pattern after failed delivery
+- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
++ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
+
+ RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
+ .address(Address.redisquesAddress())
+
diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch
new file mode 100644
index 0000000..d70b12b
--- /dev/null
+++ b/src/main/patch/houston/default.patch
@@ -0,0 +1,74 @@
+
+ Custom houston patch to have a "usable" service at all.
+
+ Patch based on "develop" aka
+ "3b1275e123c2b7aa2ffaa34270a5e1a373a65993" from "2023-04-27".
+
+
+diff --git a/pom.xml b/pom.xml
+index 0ed4f7f3..b44c5693 100644
+--- a/pom.xml
++++ b/pom.xml
+@@ -72,8 +72,6 @@
+ <skip.wagon>false</skip.wagon>
+ <skip.copy-dependencies>false</skip.copy-dependencies>
+
+- <!-- spotless -->
+- <source.format.apply.phase>compile</source.format.apply.phase>
+ </properties>
+
+ <scm>
+diff --git a/houston-process/pom.xml b/houston-process/pom.xml
+index 374dcb97..3c24937c 100644
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -25,6 +25,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.houston</groupId>
+diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+index 432efb01..d1729fe9 100644
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -68,6 +68,9 @@ public class Deployer {
+ private static final Logger LOGGER = LoggerFactory.getLogger(Deployer.class);
+
+ public static void main(String[] args) throws Exception {
++ boolean isAssertIsEnabled = false;
++ assert isAssertIsEnabled = true;
++ if (!isAssertIsEnabled) throw new UnsupportedOperationException("Enable assertions to fix this problem -> https://stackoverflow.com/a/68893479/4415884");
+ setStartupProperties();
+ Props.prepare();
+
+@@ -378,7 +378,7 @@ public class Deployer {
+ // All other queues (typically to backend services) with a slow-down pattern after
+ // failed delivery
+ qc.add(
+- new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
++ new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
+
+ RedisquesConfiguration redisquesConfig =
+ RedisquesConfiguration.with()
diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh
index 4dae088..40189c2 100755
--- a/src/main/shell/BackupByRsync/backup.sh
+++ b/src/main/shell/BackupByRsync/backup.sh
@@ -66,6 +66,7 @@ run () {
--exclude=".git/refs/remotes" \
--exclude=".git/refs/tags" \
--exclude=".idea" \
+ --exclude="/.git-credentials" \
--exclude="/.NERDTreeBookmarks" \
--exclude="/.Xauthority" \
--exclude="/.bash_history" \
@@ -81,8 +82,6 @@ run () {
--exclude="/.config/JetBrains" \
--exclude="/.gdb_history" \
--exclude="/.lesshst" \
- --exclude="/.profile" \
- --exclude="/.vimrc" \
--exclude="/.xsession-errors" \
--exclude="/.xsession-errors.old" \
--exclude="/mnt" \
@@ -98,24 +97,13 @@ run () {
--exclude="/Downloads" \
--exclude="/crashdumps" \
--exclude="/images" \
- --exclude="/projects/apple/cups" \
- --exclude="/projects/gnu" \
- --exclude="/projects/lua" \
- --exclude="/projects/misc/OpenSSL" \
- --exclude="/projects/misc/OpenVPN" \
- --exclude="/projects/misc/busybox" \
- --exclude="/projects/misc/cgit" \
- --exclude="/projects/misc/dash" \
- --exclude="/projects/misc/endlessh" \
- --exclude="/projects/misc/jssc" \
- --exclude="/projects/misc/libqrencode" \
- --exclude="/projects/misc/mbedtls" \
- --exclude="/projects/misc/openbox" \
+ --exclude="/projects/forks" \
--exclude="cee-misc-lib/external" \
--exclude="cee-misc-lib/tmp" \
--exclude="/tmp" \
--exclude="/virtualbox-*" \
--exclude="/vmshare" \
+ --exclude="/vm-qemu" \
"${DIR_FROM:?}" \
"${BACKUP_PATH:?}/${DST_PREFIX}" \
;
diff --git a/src/test/java/ch/hiddenalpha/unspecifiedgarbage/crypto/Foo.java b/src/test/java/ch/hiddenalpha/unspecifiedgarbage/crypto/Foo.java
new file mode 100644
index 0000000..a7d326f
--- /dev/null
+++ b/src/test/java/ch/hiddenalpha/unspecifiedgarbage/crypto/Foo.java
@@ -0,0 +1,60 @@
+package ch.hiddenalpha.unspecifiedgarbage.crypto;
+
+import javax.crypto.Cipher;
+import javax.crypto.CipherOutputStream;
+import javax.crypto.NoSuchPaddingException;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.security.InvalidKeyException;
+import java.security.KeyFactory;
+import java.security.NoSuchAlgorithmException;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.RSAPublicKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+
+import static java.lang.System.err;
+import static java.lang.System.in;
+import static java.lang.System.out;
+
+
+public class Foo {
+
+ public static void main(String[] args) throws Exception {
+ if (args.length != 1) {
+ err.println("\nUsage:\n Pass path to public key as the only arg.\n Then write to stdin and you'll get encrypted data on stdout.\n");
+ System.exit(1);
+ }
+ var that = new Foo();
+ that.encryptKey = args[0];
+ try( var snk = that.newEncryptionFilter(out); ){
+ byte[] buf = new byte[8192];
+ while( true ){
+ int readLen = in.read(buf, 0, buf.length);
+ if( readLen == -1 ) break; /*EOF*/
+ assert readLen > 0;
+ snk.write(buf, 0, readLen);
+ }
+ }
+ }
+
+ private String encryptKey;
+
+ private OutputStream newEncryptionFilter(OutputStream dst) throws IOException, NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeySpecException, InvalidKeyException
+ {
+ byte[] rawKey;
+ try (var is = new FileInputStream(encryptKey)) {
+ rawKey = is.readAllBytes();
+ }
+ String algoStr = "RSA"/*TODO this info is available inside an PKCS8 file so take it from there automatically*/;
+ var keySpec = new java.security.spec.PKCS8EncodedKeySpec(rawKey, algoStr);
+ err.printf("format: %s\n", keySpec.getFormat());
+ err.printf("algo: %s\n", keySpec.getAlgorithm());
+ var keyFactory = KeyFactory.getInstance(keySpec.getAlgorithm());
+ var key = keyFactory.generatePublic(keySpec);
+ var cipher = Cipher.getInstance("AES");
+ cipher.init(Cipher.ENCRYPT_MODE, key);
+ return new CipherOutputStream(dst, cipher);
+ }
+
+}
diff --git a/src/test/java/ch/hiddenalpha/unspecifiedgarbage/format/FormatUtilsTest.java b/src/test/java/ch/hiddenalpha/unspecifiedgarbage/format/FormatUtilsTest.java
deleted file mode 100644
index df7cd48..0000000
--- a/src/test/java/ch/hiddenalpha/unspecifiedgarbage/format/FormatUtilsTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-package ch.hiddenalpha.unspecifiedgarbage.format;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-
-public class FormatUtilsTest {
-
- @Test(expected = IllegalArgumentException.class)
- public void worksAsExpectedWithN0() {
- FormatUtils.toStr(123F, 0);
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void worksAsExpectedWithN8() {
- FormatUtils.toStr(123F, 8);
- }
-
- @Test
- public void shortensZero() {
- for( int i = 0 ; i < 7 ; ++i ){
- Assert.assertEquals("0", FormatUtils.toStr(0, i + 1));
- }
- }
-
- @Test
- public void worksAsExpectedWithN1() {
- Assert.assertEquals("1e-09", FormatUtils.toStr(0.0000000012345F, 1));
- Assert.assertEquals("1e-08", FormatUtils.toStr(0.000000012345F, 1));
- Assert.assertEquals("1e-07", FormatUtils.toStr(0.00000012345F, 1));
- Assert.assertEquals("1e-06", FormatUtils.toStr(0.0000012345F, 1));
- Assert.assertEquals("1e-05", FormatUtils.toStr(0.000012345F, 1));
- Assert.assertEquals("0.0001", FormatUtils.toStr(0.00012345F, 1));
- Assert.assertEquals("0.001", FormatUtils.toStr(0.0012345F, 1));
- Assert.assertEquals("0.01", FormatUtils.toStr(0.012345F, 1));
- Assert.assertEquals("0.1", FormatUtils.toStr(0.12345F, 1));
- Assert.assertEquals("1", FormatUtils.toStr(1.2345F, 1));
- Assert.assertEquals("12", FormatUtils.toStr(12.345F, 1));
- Assert.assertEquals("123", FormatUtils.toStr(123.45F, 1));
- Assert.assertEquals("1234", FormatUtils.toStr(1234.5F, 1));
- Assert.assertEquals("1e+04", FormatUtils.toStr(12345F, 1));
- Assert.assertEquals("1e+05", FormatUtils.toStr(123450F, 1));
- Assert.assertEquals("1e+06", FormatUtils.toStr(1234500F, 1));
- Assert.assertEquals("1e+07", FormatUtils.toStr(12345000F, 1));
- Assert.assertEquals("1e+08", FormatUtils.toStr(123450000F, 1));
- Assert.assertEquals("1e+09", FormatUtils.toStr(1234500000F, 1));
- }
-
- @Test
- public void worksAsExpectedWithN2() {
- Assert.assertEquals("1.2e-09", FormatUtils.toStr(0.0000000012345F, 2));
- Assert.assertEquals("1.2e-08", FormatUtils.toStr(0.000000012345F, 2));
- Assert.assertEquals("1.2e-07", FormatUtils.toStr(0.00000012345F, 2));
- Assert.assertEquals("1.2e-06", FormatUtils.toStr(0.0000012345F, 2));
- Assert.assertEquals("1.2e-05", FormatUtils.toStr(0.000012345F, 2));
- Assert.assertEquals("0.00012", FormatUtils.toStr(0.00012345F, 2));
- Assert.assertEquals("0.0012", FormatUtils.toStr(0.0012345F, 2));
- Assert.assertEquals("0.012", FormatUtils.toStr(0.012345F, 2));
- Assert.assertEquals("0.12", FormatUtils.toStr(0.12345F, 2));
- Assert.assertEquals("1.2", FormatUtils.toStr(1.2345F, 2));
- Assert.assertEquals("12", FormatUtils.toStr(12.345F, 2));
- Assert.assertEquals("123", FormatUtils.toStr(123.45F, 2));
- Assert.assertEquals("1234", FormatUtils.toStr(1234.5F, 2));
- Assert.assertEquals("12345", FormatUtils.toStr(12345F, 2));
- Assert.assertEquals("123450", FormatUtils.toStr(123450F, 2));
- Assert.assertEquals("1.2e+06", FormatUtils.toStr(1234500F, 2));
- Assert.assertEquals("1.2e+07", FormatUtils.toStr(12345000F, 2));
- Assert.assertEquals("1.2e+08", FormatUtils.toStr(123450000F, 2));
- Assert.assertEquals("1.2e+09", FormatUtils.toStr(1234500000F, 2));
- }
-
- @Test
- public void worksAsExpectedWithN3() {
- Assert.assertEquals("1.23e-09", FormatUtils.toStr(0.0000000012345F, 3));
- Assert.assertEquals("1.23e-08", FormatUtils.toStr(0.000000012345F, 3));
- Assert.assertEquals("1.23e-07", FormatUtils.toStr(0.00000012345F, 3));
- Assert.assertEquals("1.23e-06", FormatUtils.toStr(0.0000012345F, 3));
- Assert.assertEquals("1.23e-05", FormatUtils.toStr(0.000012345F, 3));
- Assert.assertEquals("0.000123", FormatUtils.toStr(0.00012345F, 3));
- Assert.assertEquals("0.00123", FormatUtils.toStr(0.0012345F, 3));
- Assert.assertEquals("0.0123", FormatUtils.toStr(0.012345F, 3));
- Assert.assertEquals("0.123", FormatUtils.toStr(0.12345F, 3));
- Assert.assertEquals("1.23", FormatUtils.toStr(1.2345F, 3));
- Assert.assertEquals("12.3", FormatUtils.toStr(12.345F, 3));
- Assert.assertEquals("123", FormatUtils.toStr(123.45F, 3));
- Assert.assertEquals("1234", FormatUtils.toStr(1234.5F, 3));
- Assert.assertEquals("12345", FormatUtils.toStr(12345F, 3));
- Assert.assertEquals("123450", FormatUtils.toStr(123450F, 3));
- Assert.assertEquals("1234500", FormatUtils.toStr(1234500F, 3));
- Assert.assertEquals("1.23e+07", FormatUtils.toStr(12345000F, 3));
- Assert.assertEquals("1.23e+08", FormatUtils.toStr(123450000F, 3));
- Assert.assertEquals("1.23e+09", FormatUtils.toStr(1234500000F, 3));
- }
-
- @Test
- public void worksAsExpectedWithN4() {
- Assert.assertEquals("1.234e-05", FormatUtils.toStr(0.000012345F, 4));
- Assert.assertEquals("0.0001234", FormatUtils.toStr(0.00012345F, 4));
- Assert.assertEquals("0.001234", FormatUtils.toStr(0.0012345F, 4));
- Assert.assertEquals("0.01235", FormatUtils.toStr(0.012345F, 4));
- Assert.assertEquals("0.1235", FormatUtils.toStr(0.12345F, 4));
- Assert.assertEquals("1.235", FormatUtils.toStr(1.2345F, 4));
- Assert.assertEquals("12.35", FormatUtils.toStr(12.345F, 4));
- Assert.assertEquals("123.4", FormatUtils.toStr(123.45F, 4));
- Assert.assertEquals("1234", FormatUtils.toStr(1234.5F, 4));
- Assert.assertEquals("12345", FormatUtils.toStr(12345F, 4));
- Assert.assertEquals("123450", FormatUtils.toStr(123450F, 4));
- Assert.assertEquals("1234500", FormatUtils.toStr(1234500F, 4));
- Assert.assertEquals("12345000", FormatUtils.toStr(12345000F, 4));
- Assert.assertEquals("1.235e+08", FormatUtils.toStr(123450000F, 4));
- }
-
-}