From e5f06ce9206bc790209fb1ca217b1cb9292f0547 Mon Sep 17 00:00:00 2001 From: Dusty Daemon Date: Mon, 18 Dec 2023 18:42:43 -0500 Subject: [PATCH] splice: script for complex splices WIP WIP --- common/splice_script.c | 1789 +++++++++++++++++++++++++++++++ common/splice_script.h | 61 ++ common/test/Makefile | 10 + common/test/run-splice_script.c | 75 ++ plugins/Makefile | 9 +- 5 files changed, 1942 insertions(+), 2 deletions(-) create mode 100644 common/splice_script.c create mode 100644 common/splice_script.h create mode 100644 common/test/run-splice_script.c diff --git a/common/splice_script.c b/common/splice_script.c new file mode 100644 index 000000000000..fb542134a55e --- /dev/null +++ b/common/splice_script.c @@ -0,0 +1,1789 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define ARROW_SYMBOL "->" +#define PIPE_SYMBOL '|' +#define AT_SYMBOL '@' +#define COLON_SYMBOL ':' +#define Q_SYMBOL '?' +#define WILD_SYMBOL '*' + +#define PERCENT_REGEX "^([0-9]*)[.]?([0-9]*)%$" +#define Q_REGEX "^\\?$" +#define WILD_REGEX "^\\*$" +#define CHANID_REGEX "^[0-9A-Za-z]{64}$" +#define NODEID_REGEX "^0[23][0-9A-Za-z]{64}$" +#define WALLET_REGEX "^wallet$" +#define SATM_REGEX "^([0-9]*)[.]?([0-9]*)[Mm]$" +#define SATK_REGEX "^([0-9]*)[.]?([0-9]*)[Kk]$" + +#define CODE_SNIPPET_PADDING 80 + +/* Minimum # of matching charaters to autocomplete nodeid or chanid */ +#define NODEID_MIN_CHARS 4 +#define CHANID_MIN_CHARS 4 + +/* Token types from simplest to most complex. */ +enum token_type { + TOK_CHAR, + TOK_DELIMITER, /* Newline or semicolon. Guaranteed one at end. */ + TOK_ARROW, + TOK_STR, + TOK_PIPE, /* lease seperator "|", ex 5M|3M (add 5M, lease 3M) */ + TOK_COLON, /* node query seperator ":", ex nodeid:0 */ + TOK_ATSYM, /* lease rate seperator "@", ex 3M@2% */ + TOK_PERCENT, /* ie "80%" */ + TOK_SATS, /* ie 8M or 0 */ + TOK_QUESTION, /* ie "?" 
*/ + TOK_WILDCARD, /* ie "*" */ + TOK_CHANID, + TOK_WALLET, + TOK_NODEID, + TOK_BTCADDR, + TOK_CHANQUERY, /* ie nodeid:? */ + TOK_MULTI_CHANID, /* Matches stored in ->right */ + TOK_LEASEREQ, + TOK_LEASERATE, /* ie @2% */ + TOK_SEGMENT, /* An entire line or semicolon seperated segment */ +}; + +struct token { + enum token_type type; + size_t script_index; /* For error messages */ + char c; + char *str, *rstr; + u32 ppm; + struct amount_sat amount_sat; + struct node_id *node_id; + struct channel_id *chan_id; + struct token *left, *middle, *right; +}; + +static struct token *new_token(const tal_t *ctx, enum token_type token_type, + size_t script_index) +{ + struct token *token = talz(ctx, struct token); + + token->type = token_type; + token->script_index = script_index; + + return token; +} + +static const char *token_type_str(enum token_type type) +{ + switch (type) { + case TOK_CHAR: return "TOK_CHAR"; + case TOK_DELIMITER: return "TOK_DELIMITER"; + case TOK_ARROW: return "TOK_ARROW"; + case TOK_STR: return "TOK_STR"; + case TOK_PIPE: return "TOK_PIPE"; + case TOK_ATSYM: return "TOK_ATSYM"; + case TOK_COLON: return "TOK_COLON"; + case TOK_SATS: return "TOK_SATS"; + case TOK_PERCENT: return "TOK_PERCENT"; + case TOK_QUESTION: return "TOK_QUESTION"; + case TOK_WILDCARD: return "TOK_WILDCARD"; + case TOK_CHANID: return "TOK_CHANID"; + case TOK_WALLET: return "TOK_WALLET"; + case TOK_NODEID: return "TOK_NODEID"; + case TOK_BTCADDR: return "TOK_BTCADDR"; + case TOK_CHANQUERY: return "TOK_CHANQUERY"; + case TOK_MULTI_CHANID: return "TOK_MULTI_CHANID"; + case TOK_LEASEREQ: return "TOK_LEASEREQ"; + case TOK_LEASERATE: return "TOK_LEASERATE"; + case TOK_SEGMENT: return "TOK_SEGMENT"; + } +} + +static char *chan_id_to_hexstr(const tal_t *ctx, struct channel_id channel_id) +{ + char *str = (char*)tal_arr(ctx, u8, hex_str_size(32)); + + if (!hex_encode(channel_id.id, 32, str, hex_str_size(32))) + return NULL; + + return str; +} + +static void dump_token_shallow(char **str, 
struct token *token, char *prefix) +{ + const char *tmp; + + tal_append_fmt(str, "%s%zu:%s", prefix, token->script_index, + token_type_str(token->type)); + + if (token->c) { + tal_append_fmt(str, " char:"); + if (token->c == '\n') + tal_append_fmt(str, "'\\n'"); + else if (token->c == '\r') + tal_append_fmt(str, "'\\r'"); + else if (token->c == '\t') + tal_append_fmt(str, "'\\t'"); + else if (token->c < ' ') + tal_append_fmt(str, "0x%02x", token->c); + else + tal_append_fmt(str, "'%c'", token->c); + } + + if (token->str) + tal_append_fmt(str, " str:\"%s\"", token->str); + + if (token->node_id) + tal_append_fmt(str, " node_id:%s", + node_id_to_hexstr(tmpctx, token->node_id)); + + if (token->chan_id) + tal_append_fmt(str, " chan_id:%s", + chan_id_to_hexstr(tmpctx, *token->chan_id)); + + if (token->ppm) + tal_append_fmt(str, " ppm:%u", token->ppm); + + if (!amount_sat_zero(token->amount_sat) || token->type == TOK_SATS) { + tmp = fmt_amount_sat(tmpctx, token->amount_sat); + tal_append_fmt(str, " amnt:%s", tmp); + tal_free(tmp); + } +} + +/* Returns the indent used */ +static int dump_token(char **str, struct token *token, int indent, char *prefix) +{ + if (token->left) + indent = dump_token(str, token->left, indent, "l ") + 1; + + for (int i = 0; i < indent; i++) + tal_append_fmt(str, " "); + + dump_token_shallow(str, token, prefix); + + tal_append_fmt(str, "\n"); + + if (token->middle) + dump_token(str, token->middle, indent, "m "); + + if (token->right) + dump_token(str, token->right, indent + 1, "r "); + + return indent; +} + +static struct splice_script_error *debug_dump(const tal_t *ctx, + struct token **tokens) +{ + struct splice_script_error *error = tal(ctx, + struct splice_script_error); + + error->type = DEBUG_DUMP; + error->script_index = 0; + error->message = tal_strdup(error, ""); + + for (size_t i = 0; i < tal_count(tokens); i++) + dump_token(&error->message, tokens[i], 0, "- "); + + return error; +} + +static struct splice_script_error 
*dump_segments(const tal_t *ctx, + struct token **tokens) +{ + struct splice_script_error *error = tal(ctx, + struct splice_script_error); + + error->type = DEBUG_DUMP; + error->script_index = 0; + error->message = tal_strdup(error, ""); + + for (size_t i = 0; i < tal_count(tokens); i++) { + if (tokens[i]->type == TOK_SEGMENT) { + dump_token_shallow(&error->message, tokens[i]->left, + ""); + dump_token_shallow(&error->message, tokens[i]->middle, + " -> "); + if (tokens[i]->right) + dump_token_shallow(&error->message, + tokens[i]->right, " -> "); + } + else { + tal_append_fmt(&error->message, "Invalid token!! "); + dump_token_shallow(&error->message, tokens[i], ""); + } + tal_append_fmt(&error->message, "\n"); + } + + return error; +} + +static struct splice_script_error *new_error_offset(const tal_t *ctx, + enum splice_script_error_type type, + struct token *token, + const char *phase, + int index_offset) +{ + struct splice_script_error *error = tal(ctx, struct splice_script_error); + + error->type = type; + error->script_index = token->script_index + index_offset; + error->message = tal_strdup(error, ""); + error->phase = phase; + + return error; +} + +static struct splice_script_error *new_error(const tal_t *ctx, + enum splice_script_error_type type, + struct token *token, + const char *phase) +{ + return new_error_offset(ctx, type, token, phase, 0); +} + +static char *context_snippet(const tal_t *ctx, + const char *script, + struct splice_script_error *error) +{ + const char *start = script + error->script_index; + const char *last = start; + const char *end = script + strlen(script); + char *str; + + for (size_t i = 0; i < CODE_SNIPPET_PADDING && start-1 >= script && start[-1] >= ' '; i++) + start--; + + for (size_t i = 0; i < CODE_SNIPPET_PADDING && last+1 < end && last[1] >= ' '; i++) + last++; + + str = tal_strndup(ctx, start, last - start); + + tal_append_fmt(&str, "\n"); + + for (const char *ptr = start; ptr < script + error->script_index; ptr++) + 
tal_append_fmt(&str, " "); + + tal_append_fmt(&str, "^\n"); + + if (error->message && strlen(error->message)) + tal_append_fmt(&str, "%s\n", error->message); + + if (error->phase) + tal_append_fmt(&str, "Compiler phase: %s\n", error->phase); + + return str; +} + +char *splice_script_compiler_error(const tal_t *ctx, + const char *script, + struct splice_script_error *error) +{ + switch (error->type) { + case INTERNAL_ERROR: + return tal_fmt(ctx, "Internal error\n%s", + context_snippet(ctx, script, error)); + case INVALID_TOKEN: + return tal_fmt(ctx, "Invalid token error\n%s", + context_snippet(ctx, script, error)); + case DEBUG_DUMP: + return tal_fmt(ctx, "Token Dump:\n%s", error->message); + case TOO_MANY_PIPES: + return tal_fmt(ctx, "Too many '%c' symbols near here\n%s", + PIPE_SYMBOL, + context_snippet(ctx, script, error)); + case TOO_MANY_ATS: + return tal_fmt(ctx, "Too many '%c' symbols near here\n%s", + AT_SYMBOL, + context_snippet(ctx, script, error)); + case TOO_MANY_COLONS: + return tal_fmt(ctx, "Too many '%c' symbols near here\n%s", + COLON_SYMBOL, + context_snippet(ctx, script, error)); + case INVALID_NODEID: + return tal_fmt(ctx, "Invalid node id\n%s", + context_snippet(ctx, script, error)); + case INVALID_CHANID: + return tal_fmt(ctx, "Invalid channel id\n%s", + context_snippet(ctx, script, error)); + case WRONG_NUM_SEGMENT_CHUNKS: + return tal_fmt(ctx, "Segments must have one or two \"" + ARROW_SYMBOL "\" symbols\n%s", + context_snippet(ctx, script, error)); + case MISSING_ARROW: + return tal_fmt(ctx, "Segments elements must be seperated by \"" + ARROW_SYMBOL "\" symbols\n%s", + context_snippet(ctx, script, error)); + case NO_MATCHING_NODES: + return tal_fmt(ctx, "No matching nodes for node query\n%s", + context_snippet(ctx, script, error)); + case CHAN_INDEX_ON_WILDCARD_NODE: + return tal_fmt(ctx, "Node wildcard matches must use an index," + " '%c', or '%c' after the '%c'\n%s", + Q_SYMBOL, WILD_SYMBOL, COLON_SYMBOL, + context_snippet(ctx, script, 
error)); + case CHANQUERY_TYPEERROR: + return tal_fmt(ctx, "Channel query has invalid type(s)\n%s", + context_snippet(ctx, script, error)); + case CHAN_INDEX_NOT_FOUND: + return tal_fmt(ctx, "Channel index not found for node\n%s", + context_snippet(ctx, script, error)); + case NODE_ID_MULTIMATCH: + return tal_fmt(ctx, "Node id matched multiple nodes, specify" + " more characters to be more specific\n%s", + context_snippet(ctx, script, error)); + case NODE_ID_CHAN_OVERMATCH: + return tal_fmt(ctx, "Node id matched a channel id, specify" + " more characters to be more specific\n%s", + context_snippet(ctx, script, error)); + case CHAN_ID_MULTIMATCH: + return tal_fmt(ctx, "Channel id matched multiple channels," + " specify more characters to be more specific" + "\n%s", + context_snippet(ctx, script, error)); + case CHAN_ID_NODE_OVERMATCH: + return tal_fmt(ctx, "Channel id matched a node id, specify" + " more characters to be more specific\n%s", + context_snippet(ctx, script, error)); + case NODE_ID_NO_UNUSED: + return tal_fmt(ctx, "No unused channels for node id. Other" + " channel queries already claimed all channels" + "\n%s", + context_snippet(ctx, script, error)); + case DOUBLE_MIDDLE_OP: + return tal_fmt(ctx, "Duplicate channel or address equivilent." + " Each line must contain only one\n%s", + context_snippet(ctx, script, error)); + case MISSING_MIDDLE_OP: + return tal_fmt(ctx, "Missing channel or address equivilent." + " Each line must contain one\n%s", + context_snippet(ctx, script, error)); + case MISSING_AMOUNT_OP: + return tal_fmt(ctx, "Missing sat amount. A sat amount is" + " required here\n%s", + context_snippet(ctx, script, error)); + case MISSING_AMOUNT_OR_WILD_OP: + return tal_fmt(ctx, "Missing sat amount. A sat amount or '%c'" + " is required here\n%s", WILD_SYMBOL, + context_snippet(ctx, script, error)); + case ZERO_AMOUNTS: + return tal_fmt(ctx, "Each line must specify a non-zero amount" + " or lease request. 
This line specifies none of" + " these\n%s", + context_snippet(ctx, script, error)); + case MISSING_PERCENT: + return tal_fmt(ctx, "A percentage value is required here (ie." + " 1.5%%)\n%s", + context_snippet(ctx, script, error)); + case LEASE_AMOUNT_ZERO: + return tal_fmt(ctx, "Lease specified without a non-zero amount." + " Must specify a non-zero amount.\n%s", + context_snippet(ctx, script, error)); + } +} + +static bool is_whitespace(char c) +{ + return isspace(c); +} + +static struct splice_script_error *clean_whitespace(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) + if (input[i]->type != TOK_CHAR || !is_whitespace(input[i]->c)) + tokens[n++] = tal_steal(tokens, input[i]); + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +/* Returns point in str that starts match of suffix. 
*/ +static char *find_suffix(char *str, char *suffix) +{ + char *ptr; + + if (strlen(str) < strlen(suffix)) + return false; + + ptr = str + strlen(str) - strlen(suffix); + + if (streq(ptr, suffix)) + return ptr; + + return NULL; +} + +static struct splice_script_error *find_arrows_and_strs(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + struct token *token = NULL; + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_DELIMITER: + if (token) + tokens[n++] = token; + token = NULL; + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_CHAR: + if (!token) { + token = new_token(tokens, TOK_STR, + input[i]->script_index); + token->str = tal_strdup(token, ""); + } + tal_append_fmt(&token->str, "%c", input[i]->c); + + if (find_suffix(token->str, ARROW_SYMBOL)) { + *find_suffix(token->str, ARROW_SYMBOL) = 0; + + if (*token->str) + tokens[n++] = token; + else + tal_free(token); + token = NULL; + + tokens[n++] = new_token(tokens, + TOK_ARROW, + input[i]->script_index); + } + break; + case TOK_ARROW: + case TOK_STR: + case TOK_PIPE: + case TOK_ATSYM: + case TOK_COLON: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "arrows"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static struct splice_script_error *process_top_seperators(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, + tal_count(input) * 3); + char *split_point; + size_t script_index; + size_t n = 0; + + for (size_t i = 
0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + return new_error(ctx, INVALID_TOKEN, input[i], + "top_seperators"); + case TOK_DELIMITER: + case TOK_ARROW: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_STR: + if ((split_point = strchr(input[i]->str, + PIPE_SYMBOL))) { + if (split_point != strrchr(input[i]->str, + PIPE_SYMBOL)) + return new_error_offset(ctx, + TOO_MANY_PIPES, + input[i], + "top_seperators", + split_point - input[i]->str); + + *split_point = 0; + tokens[n++] = tal_steal(tokens, input[i]); + + script_index = input[i]->script_index; + script_index += (split_point - input[i]->str); + + tokens[n++] = new_token(tokens, TOK_PIPE, + script_index); + + script_index++; + + tokens[n] = new_token(tokens, TOK_STR, + script_index); + tokens[n++]->str = split_point + 1; + + } else if ((split_point = strchr(input[i]->str, + COLON_SYMBOL))) { + if (split_point != strrchr(input[i]->str, + COLON_SYMBOL)) + return new_error_offset(ctx, + TOO_MANY_COLONS, + input[i], + "top_seperators", + split_point - input[i]->str); + + *split_point = 0; + tokens[n++] = tal_steal(tokens, input[i]); + + script_index = input[i]->script_index; + script_index += (split_point - input[i]->str); + + tokens[n++] = new_token(tokens, TOK_COLON, + script_index); + + script_index++; + + tokens[n] = new_token(tokens, TOK_STR, + script_index); + tokens[n++]->str = split_point + 1; + } else { + tokens[n++] = tal_steal(tokens, input[i]); + } + break; + case TOK_PIPE: + case TOK_ATSYM: + case TOK_COLON: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "top_seperators"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static struct 
splice_script_error *process_2nd_seperators(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, + tal_count(input) * 3); + char *split_point; + size_t script_index; + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_separators"); + case TOK_DELIMITER: + case TOK_ARROW: + case TOK_PIPE: + case TOK_COLON: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_STR: + if ((split_point = strchr(input[i]->str, + AT_SYMBOL))) { + if (split_point != strrchr(input[i]->str, + AT_SYMBOL)) + return new_error_offset(ctx, + TOO_MANY_ATS, + input[i], + "2nd_separators", + split_point - input[i]->str); + + *split_point = 0; + tokens[n++] = tal_steal(tokens, input[i]); + + script_index = input[i]->script_index; + script_index += (split_point - input[i]->str); + + tokens[n++] = new_token(tokens, TOK_ATSYM, + script_index); + + script_index++; + + tokens[n] = new_token(tokens, TOK_STR, + script_index); + tokens[n++]->str = split_point + 1; + + } else { + tokens[n++] = tal_steal(tokens, input[i]); + } + break; + case TOK_ATSYM: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_separators"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static const char *segwit_addr_net_decode(int *witness_version, + uint8_t *witness_program, + size_t *witness_program_len, + const char *addrz, + const struct chainparams *chainparams) +{ + if (segwit_addr_decode(witness_version, witness_program, + witness_program_len, chainparams->onchain_hrp, + addrz)) + return 
chainparams->onchain_hrp; + else + return NULL; +} + +static bool is_bitcoin_address(const char *address) +{ + struct ripemd160 addr; + int witness_version; + /* segwit_addr_net_decode requires a buffer of size 40, and will + * not write to the buffer if the address is too long, so a buffer + * of fixed size 40 will not overflow. */ + uint8_t witness_program[40]; + size_t witness_program_len; + + const char *bech32; + + u8 addr_version; + + if (ripemd160_from_base58(&addr_version, &addr, + address, strlen(address))) { + if (addr_version == chainparams->p2pkh_version) { + return true; + } else if (addr_version == chainparams->p2sh_version) { + return true; + } + return false; + } + + bech32 = segwit_addr_net_decode(&witness_version, witness_program, + &witness_program_len, address, + chainparams); + if (bech32) { + bool witness_ok; + + /* Only V0 has restricted lengths of witness programs */ + if (witness_version == 0) { + witness_ok = (witness_program_len == 20 || + witness_program_len == 32); + } else + witness_ok = true; + + if (!witness_ok) + return false; + + if (!streq(bech32, chainparams->onchain_hrp)) + return false; + + return true; + } + + return false; +} + +/* Checks token->str for a short node id and auto completes it. 
*/ +static bool autocomplete_node_id(struct token *token, + struct splice_script_chan *channels, + size_t channels_count, + bool *multiple_nodes, + bool *chan_id_overmatch) +{ + struct node_id *match; + struct node_id candidate; + size_t len = strlen(token->str) / 2; + + *multiple_nodes = false; + *chan_id_overmatch = false; + + if (strlen(token->str) < NODEID_MIN_CHARS) + return false; + if (len > PUBKEY_CMPR_LEN) + return false; + if (!hex_decode(token->str, len * 2, + candidate.k, len)) + return false; + + match = NULL; + for (size_t i = 0; i < channels_count; i++) { + if (memeq(candidate.k, len, channels[i].node_id.k, len)) { + /* must not match multiple node ids */ + if (match && !node_id_eq(match, &channels[i].node_id)) { + *multiple_nodes = true; + return true; + } + match = &channels[i].node_id; + } + /* nodeid query must *not* match any channel ids */ + if (memeq(candidate.k, len, channels[i].chan_id.id, len)) + *chan_id_overmatch = true; + } + + if (!match) + return false; + + assert(!token->node_id); + token->node_id = tal_dup(token, struct node_id, match); + + return true; +} + +static bool autocomplete_chan_id(struct token *token, + struct splice_script_chan *channels, + size_t channels_count, + bool *multiple_chans, + bool *node_id_overmatch) +{ + struct channel_id *match; + struct channel_id candidate; + size_t len = strlen(token->str) / 2; + + *multiple_chans = false; + *node_id_overmatch = false; + + if (strlen(token->str) < NODEID_MIN_CHARS) + return false; + if (len > PUBKEY_CMPR_LEN) + return false; + if (!hex_decode(token->str, len * 2, + candidate.id, len)) + return false; + + match = NULL; + for (size_t i = 0; i < channels_count; i++) { + if (memeq(candidate.id, len, channels[i].chan_id.id, len)) { + /* must not match multiple node ids */ + if (match && !channel_id_eq(match, &channels[i].chan_id)) { + *multiple_chans = true; + return true; + } + match = &channels[i].chan_id; + } + /* nodeid query must *not* match any channel ids */ + if 
(memeq(candidate.id, len, channels[i].node_id.k, len)) + *node_id_overmatch = true; + } + + if (!match) + return false; + + assert(!token->chan_id); + token->chan_id = tal_dup(token, struct channel_id, match); + + return true; +} + +static struct splice_script_error *data_types(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + char *whole, *decimal; + char *sat_candidate; + struct amount_sat amount_sat; + bool multiple = false; + bool overmatch = false; + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + return new_error(ctx, INVALID_TOKEN, input[i], + "data_types"); + case TOK_DELIMITER: + case TOK_ARROW: + case TOK_PIPE: + case TOK_COLON: + case TOK_ATSYM: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_STR: + if (tal_strreg(ctx, input[i]->str, PERCENT_REGEX, + &whole, &decimal)) { + input[i]->ppm = (u32)(10000 * atoll(whole) + + 100 * atoll(decimal)); + input[i]->type = TOK_PERCENT; + } else if (tal_strreg(ctx, input[i]->str, Q_REGEX)) { + input[i]->type = TOK_QUESTION; + } else if (tal_strreg(ctx, input[i]->str, WILD_REGEX)) { + input[i]->type = TOK_WILDCARD; + } else if (tal_strreg(ctx, input[i]->str, + NODEID_REGEX)) { + input[i]->type = TOK_NODEID; + input[i]->node_id = tal(input[i], + struct node_id); + if (!node_id_from_hexstr(input[i]->str, + strlen(input[i]->str), + input[i]->node_id)) + return new_error(ctx, INVALID_NODEID, + input[i], + "data_types"); + } else if (is_bitcoin_address(input[i]->str)) { + input[i]->type = TOK_BTCADDR; + } else if (tal_strreg(ctx, input[i]->str, + CHANID_REGEX)) { + input[i]->type = TOK_CHANID; + input[i]->chan_id = tal(input[i], + struct channel_id); + if (!hex_decode(input[i]->str, + strlen(input[i]->str), + input[i]->chan_id, + 32)) + return new_error(ctx, 
INVALID_CHANID, + input[i], + "data_types"); + } else if (tal_strreg(ctx, input[i]->str, + WALLET_REGEX)) { + input[i]->type = TOK_WALLET; + } else if (autocomplete_node_id(input[i], channels, + channels_count, + &multiple, + &overmatch)) { + if (multiple) + return new_error(ctx, + NODE_ID_MULTIMATCH, + input[i], + "data_types"); + + if (overmatch) + return new_error(ctx, + NODE_ID_CHAN_OVERMATCH, + input[i], + "data_types"); + input[i]->type = TOK_NODEID; + } else if (autocomplete_chan_id(input[i], channels, + channels_count, + &multiple, + &overmatch)) { + if (multiple) + return new_error(ctx, + CHAN_ID_MULTIMATCH, + input[i], + "data_types"); + + if (overmatch) + return new_error(ctx, + CHAN_ID_NODE_OVERMATCH, + input[i], + "data_types"); + input[i]->type = TOK_CHANID; + } else { + + /* Parse shorthand sat formats */ + sat_candidate = input[i]->str; + + if (tal_strreg(ctx, sat_candidate, SATM_REGEX, + &whole, &decimal)) { + sat_candidate = tal_fmt(input[i], + "%"PRIu64, + (u64)(1000000 * atoll(whole) + + 10000 * atoll(decimal))); + } else if (tal_strreg(ctx, sat_candidate, SATK_REGEX, + &whole, &decimal)) { + sat_candidate = tal_fmt(input[i], + "%"PRIu64, + (u64)(1000 * atoll(whole) + + 10 * atoll(decimal))); + } + + if (parse_amount_sat(&amount_sat, sat_candidate, + strlen(sat_candidate))) { + input[i]->type = TOK_SATS; + input[i]->amount_sat = amount_sat; + } + + if (sat_candidate != input[i]->str) + tal_free(sat_candidate); + } + + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "data_types"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static struct splice_script_error 
*compress_top_operands(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + return new_error(ctx, INVALID_TOKEN, input[i], + "operands"); + case TOK_DELIMITER: + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_ATSYM: + if (!n || i + 1 > tal_count(input)) + return new_error(ctx, INVALID_TOKEN, input[i], + "operands"); + input[i]->type = TOK_LEASERATE; + input[i]->right = tal_steal(input[i], input[i+1]); + tokens[n-1]->right = tal_steal(tokens[n-1], input[i]); + i++; + break; + case TOK_COLON: + if (!n || i + 1 > tal_count(input)) + return new_error(ctx, INVALID_TOKEN, input[i], + "operands"); + input[i]->type = TOK_CHANQUERY; + input[i]->left = tal_steal(input[i], tokens[n-1]); + input[i]->right = tal_steal(input[i], input[i+1]); + tokens[n-1] = tal_steal(tokens, input[i]); + i++; + break; + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "operands"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static struct splice_script_error *compress_2nd_operands(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_operands"); + case TOK_DELIMITER: + case 
TOK_ARROW: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASERATE: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_PIPE: + if (!n || i + 1 > tal_count(input)) + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_operands"); + input[i]->type = TOK_LEASEREQ; + input[i]->right = tal_steal(input[i], input[i+1]); + tokens[n-1]->right = tal_steal(tokens[n-1], input[i]); + i++; + break; + case TOK_LEASEREQ: + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_operands"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static bool matches_chan_id(struct token *token, struct channel_id chan_id) +{ + if (token->chan_id && channel_id_eq(token->chan_id, &chan_id)) + return true; + + if (token->left && matches_chan_id(token->left, chan_id)) + return true; + + if (token->middle && matches_chan_id(token->middle, chan_id)) + return true; + + if (token->right && matches_chan_id(token->right, chan_id)) + return true; + + return false; +} + +/* Searches through both tokensA and tokensB. 
*/ +static struct node_id *first_node_with_unused_chan(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct token **tokensA, + size_t a_size, + struct token **tokensB, + size_t b_size) +{ + for (size_t i = 0; i < channels_count; i++) { + bool used = false; + for (size_t j = 0; j < a_size; j++) + if (matches_chan_id(tokensA[j], channels[i].chan_id)) + used = true; + for (size_t k = 0; k < b_size; k++) + if (matches_chan_id(tokensB[k], channels[i].chan_id)) + used = true; + if (!used) + return tal_dup(ctx, struct node_id, + &channels[i].node_id); + } + + return NULL; +} + +static struct channel_id *chan_for_node_index(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct node_id node_id, + size_t channel_index) +{ + for (size_t i = 0; i < channels_count; i++) + if (node_id_eq(&node_id, &channels[i].node_id)) + if (!channel_index--) + return tal_dup(ctx, struct channel_id, + &channels[i].chan_id); + return NULL; +} + +static struct channel_id **unused_chans(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct token **tokensA, + size_t a_size, + struct token **tokensB, + size_t b_size) +{ + struct channel_id **result = tal_arr(ctx, struct channel_id*, 0); + + for (size_t i = 0; i < channels_count; i++) { + bool used = false; + for (size_t j = 0; j < a_size; j++) + if (matches_chan_id(tokensA[j], channels[i].chan_id)) + used = true; + for (size_t k = 0; k < b_size; k++) + if (matches_chan_id(tokensB[k], channels[i].chan_id)) + used = true; + if (!used) + tal_arr_expand(&result, tal_dup(result, + struct channel_id, + &channels[i].chan_id)); + } + + if (!tal_count(result)) + result = tal_free(result); + + return result; +} + +static struct channel_id **unused_chans_for_node(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct token **tokensA, + size_t a_size, + struct token **tokensB, + size_t b_size, + struct node_id 
node_id) +{ + struct channel_id **result = tal_arr(ctx, struct channel_id*, 0); + + for (size_t i = 0; i < channels_count; i++) { + bool used = false; + if (!node_id_eq(&node_id, &channels[i].node_id)) + continue; + for (size_t j = 0; j < a_size; j++) + if (matches_chan_id(tokensA[j], channels[i].chan_id)) + used = true; + for (size_t k = 0; k < b_size; k++) + if (matches_chan_id(tokensB[k], channels[i].chan_id)) + used = true; + if (!used) + tal_arr_expand(&result, tal_dup(result, + struct channel_id, + &channels[i].chan_id)); + } + + if (!tal_count(result)) + result = tal_free(result); + + return result; +} + +static struct splice_script_error *resolve_channel_ids(const tal_t *ctx, + struct splice_script_chan *channels, + size_t channels_count, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + struct node_id *node_id; + struct channel_id *chan_id; + struct channel_id **chan_ids; + struct token *token_itr; + size_t n = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_PIPE: + return new_error(ctx, INVALID_TOKEN, input[i], + "2nd_operands"); + case TOK_LEASERATE: + case TOK_ARROW: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_LEASEREQ: + case TOK_DELIMITER: + tokens[n++] = tal_steal(tokens, input[i]); + break; + case TOK_CHANQUERY: + if (!input[i]->left || !input[i]->right) + return new_error(ctx, INVALID_TOKEN, input[i], + "resolve_channel_ids"); + /* If user specifies *:? it is same as ?:? 
*/ + if (input[i]->left->type == TOK_WILDCARD + && input[i]->right->type == TOK_QUESTION) + input[i]->left->type = TOK_QUESTION; + + if (input[i]->left->type == TOK_QUESTION) { + node_id = first_node_with_unused_chan(ctx, + channels, + channels_count, + input, + tal_count(input), + tokens, + n); + if (!node_id) + return new_error(ctx, NO_MATCHING_NODES, + input[i], + "resolve_channel_ids"); + input[i]->left->type = TOK_NODEID; + input[i]->left->node_id = tal_steal(input[i]->left, + node_id); + } + + if (input[i]->left->type == TOK_NODEID + && input[i]->right->type == TOK_SATS) { + chan_id = chan_for_node_index(ctx, channels, + channels_count, + *input[i]->left->node_id, + input[i]->right->amount_sat.satoshis); /* Raw: We use satoshis as index values in splice script */ + if (!chan_id) + return new_error(ctx, + CHAN_INDEX_NOT_FOUND, + input[i], + "resolve_channel_ids"); + input[i]->type = TOK_CHANID; + input[i]->chan_id = tal_steal(input[i], chan_id); + input[i]->left = tal_free(input[i]->left); + input[i]->right = tal_free(input[i]->right); + tokens[n++] = tal_steal(tokens, input[i]); + } + else if (input[i]->left->type == TOK_NODEID) { + chan_ids = unused_chans_for_node(ctx, channels, + channels_count, + input, + tal_count(input), + tokens, n, + *input[i]->left->node_id); + if (!tal_count(chan_ids)) + return new_error(ctx, NODE_ID_NO_UNUSED, + input[i], + "resolve_channel_ids"); + if (input[i]->right->type == TOK_QUESTION) { + input[i]->type = TOK_CHANID; + input[i]->chan_id = tal_steal(input[i], + chan_ids[0]); + input[i]->left = tal_free(input[i]->left); + input[i]->right = tal_free(input[i]->right); + tokens[n++] = tal_steal(tokens, input[i]); + tal_free(chan_ids); + } else if (input[i]->right->type == TOK_WILDCARD) { + input[i]->type = TOK_MULTI_CHANID; + input[i]->left = tal_free(input[i]->left); + input[i]->right = tal_free(input[i]->right); + token_itr = input[i]; + for (size_t j = 0; j < tal_count(chan_ids); j++) { + token_itr->right = new_token(token_itr, 
+ TOK_CHANID, + token_itr->script_index); + token_itr->right->chan_id = tal_dup(token_itr->right, + struct channel_id, + chan_ids[j]); + token_itr = token_itr->right; + } + tokens[n++] = tal_steal(tokens, input[i]); + tal_free(chan_ids); + } else { + tal_free(chan_ids); + return new_error(ctx, + CHAN_INDEX_ON_WILDCARD_NODE, + input[i], + "resolve_channel_ids"); + } + } + else if (input[i]->left->type == TOK_WILDCARD) { + if (input[i]->right->type != TOK_WILDCARD) + return new_error(ctx, + CHAN_INDEX_ON_WILDCARD_NODE, + input[i], + "resolve_channel_ids"); + chan_ids = unused_chans(ctx, channels, + channels_count, + input, + tal_count(input), + tokens, n); + input[i]->type = TOK_MULTI_CHANID; + input[i]->left = tal_free(input[i]->left); + input[i]->right = tal_free(input[i]->right); + token_itr = input[i]; + for (size_t j = 0; j < tal_count(chan_ids); j++) { + token_itr->right = new_token(token_itr, + TOK_CHANID, + token_itr->script_index); + token_itr->right->chan_id = tal_dup(token_itr->right, + struct channel_id, + chan_ids[j]); + token_itr = token_itr->right; + } + tokens[n++] = tal_steal(tokens, input[i]); + tal_free(chan_ids); + } + else { + return new_error(ctx, + CHANQUERY_TYPEERROR, + input[i], + "resolve_channel_ids"); + } + break; + case TOK_SEGMENT: + case TOK_MULTI_CHANID: + return new_error(ctx, INVALID_TOKEN, input[i], + "resolve_channel_ids"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static bool is_valid_middle(struct token *token) +{ + switch(token->type) { + case TOK_CHANID: + case TOK_BTCADDR: + case TOK_WALLET: + return true; + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_LEASERATE: + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_NODEID: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASEREQ: + case TOK_DELIMITER: + case TOK_SEGMENT: + return false; + } +} + +static 
struct splice_script_error *make_segments(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + size_t n = 0; + size_t next_consumable = 0; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_LEASERATE: + return new_error(ctx, INVALID_TOKEN, input[i], + "segments"); + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASEREQ: + break; + case TOK_DELIMITER: + if (i == 0 || input[i-1]->type == TOK_SEGMENT) { + next_consumable = i+1; + break; + } + if (i - next_consumable == 3) { + if (input[next_consumable+1]->type != TOK_ARROW) + return new_error(ctx, MISSING_ARROW, + input[next_consumable+1], + "segments"); + input[i]->type = TOK_SEGMENT; + input[i]->left = tal_steal(input[i], + input[next_consumable]); + input[i]->middle = tal_steal(input[i], + input[next_consumable+2]); + tokens[n++] = tal_steal(tokens, input[i]); + next_consumable = i+1; + } + else if (i - next_consumable == 5) { + if (input[next_consumable+1]->type != TOK_ARROW) + return new_error(ctx, MISSING_ARROW, + input[next_consumable+1], + "segments"); + if (input[next_consumable+3]->type != TOK_ARROW) + return new_error(ctx, MISSING_ARROW, + input[next_consumable+3], + "segments"); + input[i]->type = TOK_SEGMENT; + input[i]->left = tal_steal(input[i], + input[next_consumable]); + input[i]->middle = tal_steal(input[i], + input[next_consumable+2]); + input[i]->right = tal_steal(input[i], + input[next_consumable+4]); + tokens[n++] = tal_steal(tokens, input[i]); + next_consumable = i+1; + } + else { + return new_error(ctx, WRONG_NUM_SEGMENT_CHUNKS, + input[i], "segments"); + } + + /* Move middle 
OP to middle and validate */ + if (!tokens[n-1]->right) { + if (is_valid_middle(tokens[n-1]->left)) { + if (is_valid_middle(tokens[n-1]->middle)) + return new_error(ctx, + DOUBLE_MIDDLE_OP, + tokens[n-1]->middle, + "validate_and_clean"); + tokens[n-1]->right = tokens[n-1]->middle; + tokens[n-1]->middle = tokens[n-1]->left; + tokens[n-1]->left = new_token(tokens[n-1], + TOK_SATS, + tokens[n-1]->script_index); + tokens[n-1]->left->amount_sat = AMOUNT_SAT(0); + + } else if (is_valid_middle(tokens[n-1]->middle)) { + tokens[n-1]->right = new_token(tokens[n-1], + TOK_SATS, + tokens[n-1]->script_index); + tokens[n-1]->right->amount_sat = AMOUNT_SAT(0); + } else { + return new_error(ctx, MISSING_MIDDLE_OP, + tokens[n-1]->middle, + "validate_and_clean"); + } + } + if (tokens[n-1]->left->type == TOK_STR + && !strlen(tokens[n-1]->left->str)) { + tokens[n-1]->left->type = TOK_SATS; + tokens[n-1]->left->amount_sat = AMOUNT_SAT(0); + } + if (tokens[n-1]->right->type == TOK_STR + && !strlen(tokens[n-1]->right->str)) { + tokens[n-1]->right->type = TOK_SATS; + tokens[n-1]->right->amount_sat = AMOUNT_SAT(0); + } + break; + case TOK_SEGMENT: + return new_error(ctx, INVALID_TOKEN, input[i], + "segments"); + } + } + + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static struct splice_script_error *expand_multichans(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, 0); + struct token *token_itr; + struct token *token; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_LEASERATE: + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASEREQ: + case 
TOK_DELIMITER: + return new_error(ctx, INVALID_TOKEN, input[i], + "segments"); + case TOK_SEGMENT: + if (input[i]->middle->type == TOK_MULTI_CHANID) { + token_itr = input[i]->middle->right; + while (token_itr) { + token = tal_dup(tokens, struct token, + input[i]); + /* left & right are shared across token + * instances so we steal them onto the + * array itself */ + tal_steal(tokens, token->left); + tal_steal(tokens, token->right); + + token->middle = tal_steal(token, + token_itr); + tal_arr_expand(&tokens, token); + + token_itr = token_itr->right; + } + } else { + tal_arr_expand(&tokens, + tal_steal(tokens, input[i])); + } + break; + } + } + + tal_free(input); + *tokens_inout = tokens; + return NULL; +} + +static bool is_valid_amount(struct token *token) +{ + switch(token->type) { + case TOK_SATS: + case TOK_WILDCARD: + return true; + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_LEASERATE: + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_PERCENT: + case TOK_QUESTION: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASEREQ: + case TOK_DELIMITER: + case TOK_SEGMENT: + return false; + } +} + +static bool is_valid_nonzero_amount(struct token *token) +{ + if (token->type == TOK_SATS && amount_sat_zero(token->amount_sat)) + return false; + + return is_valid_amount(token); +} + +static struct splice_script_error *validate_and_clean(const tal_t *ctx, + struct token ***tokens_inout) +{ + struct token **input = *tokens_inout; + struct token **tokens = tal_arr(ctx, struct token *, tal_count(input)); + size_t n = 0; + struct channel_id *chan_ids = tal_arr(ctx, struct channel_id, 0); + struct token *lease, *leaserate; + + for (size_t i = 0; i < tal_count(input); i++) { + switch(input[i]->type) { + case TOK_CHAR: + case TOK_ATSYM: + case TOK_COLON: + case TOK_LEASERATE: + case TOK_ARROW: + case TOK_PIPE: + case TOK_STR: + case TOK_SATS: + case TOK_PERCENT: + 
case TOK_QUESTION: + case TOK_WILDCARD: + case TOK_CHANID: + case TOK_WALLET: + case TOK_NODEID: + case TOK_BTCADDR: + case TOK_CHANQUERY: + case TOK_MULTI_CHANID: + case TOK_LEASEREQ: + case TOK_DELIMITER: + return new_error(ctx, INVALID_TOKEN, input[i], + "validate_and_clean"); + case TOK_SEGMENT: + if (!is_valid_amount(input[i]->left)) + return new_error(ctx, MISSING_AMOUNT_OR_WILD_OP, + input[i]->left, + "validate_and_clean"); + if (!is_valid_amount(input[i]->right)) + return new_error(ctx, MISSING_AMOUNT_OR_WILD_OP, + input[i]->right, + "validate_and_clean"); + lease = input[i]->left->right; + leaserate = NULL; + if (lease) { + if (lease->type != TOK_LEASEREQ) + return new_error(ctx, INTERNAL_ERROR, + lease, + "validate_and_clean"); + if (lease->right->type != TOK_SATS) + return new_error(ctx, LEASE_AMOUNT_ZERO, + lease, + "validate_and_clean"); + lease->amount_sat = lease->right->amount_sat; + if (amount_sat_zero(lease->amount_sat)) + return new_error(ctx, LEASE_AMOUNT_ZERO, + lease->right, + "validate_and_clean"); + leaserate = lease->right->right; + } + if (leaserate) { + if (leaserate->type != TOK_LEASERATE) + return new_error(ctx, INTERNAL_ERROR, + leaserate, + "validate_and_clean"); + if (leaserate->right->type != TOK_PERCENT) + return new_error(ctx, MISSING_PERCENT, + leaserate, + "validate_and_clean"); + lease->ppm = leaserate->right->ppm; + } + if (!is_valid_nonzero_amount(input[i]->left) + && !is_valid_nonzero_amount(input[i]->right) + && !lease) /* zero in, zero out, no lease req: segment is a no-op */ + return new_error(ctx, ZERO_AMOUNTS, + input[i]->left, + "validate_and_clean"); + tokens[n++] = tal_steal(tokens, input[i]); + break; + } + } + + tal_free(chan_ids); + tal_free(input); + tal_resize(&tokens, n); + *tokens_inout = tokens; + return NULL; +} + +static bool is_delimiter(char c) +{ + return c == '\n' || c == ';'; +} + +struct splice_script_error *parse_splice_script(const tal_t *ctx, + const char *script, + struct splice_script_chan *channels, + size_t channels_count, + struct json_stream **result) +{ + 
struct splice_script_error *error; + struct token **tokens = tal_arr(ctx, struct token *, + strlen(script) + 1); + + for (size_t i = 0; i < strlen(script); i++) { + tokens[i] = new_token(tokens, is_delimiter(script[i]) + ? TOK_DELIMITER + : TOK_CHAR, i); + tokens[i]->c = script[i]; + } + + /* We add a delimiter on the end to make life simple. */ + tokens[strlen(script)] = new_token(tokens, TOK_DELIMITER, + strlen(script)); + + if ((error = clean_whitespace(ctx, &tokens))) + return error; + + if ((error = find_arrows_and_strs(ctx, &tokens))) + return error; + + if ((error = process_top_seperators(ctx, &tokens))) + return error; + + if ((error = process_2nd_seperators(ctx, &tokens))) + return error; + + if ((error = data_types(ctx, channels, channels_count, &tokens))) + return error; + + if ((error = compress_top_operands(ctx, &tokens))) + return error; + + if ((error = compress_2nd_operands(ctx, &tokens))) + return error; + + if ((error = resolve_channel_ids(ctx, channels, channels_count, &tokens))) + return error; + + if ((error = make_segments(ctx, &tokens))) + return error; + + (void)expand_multichans; + if ((error = expand_multichans(ctx, &tokens))) + return error; + + (void)validate_and_clean; + if ((error = validate_and_clean(ctx, &tokens))) + return error; + + // Check for duplicate channel ids + // Check for show amounts, error if low + // Add wallet deposit if needed + // Validate lease req's + // Clean up lease req's + // General validation (no STRs, etc) + + // implement splice_init MAX feature + + (void)debug_dump; + return debug_dump(ctx, tokens); + (void)dump_segments; + // return dump_segments(ctx, tokens); +} diff --git a/common/splice_script.h b/common/splice_script.h new file mode 100644 index 000000000000..4bf851caa848 --- /dev/null +++ b/common/splice_script.h @@ -0,0 +1,61 @@ +#ifndef SPLICE_SCRIPT_H +#define SPLICE_SCRIPT_H +#include "config.h" +#include +#include +#include +#include + +enum splice_script_error_type { + INTERNAL_ERROR, + 
INVALID_TOKEN, + DEBUG_DUMP, + TOO_MANY_PIPES, + TOO_MANY_ATS, + TOO_MANY_COLONS, + INVALID_NODEID, + INVALID_CHANID, + WRONG_NUM_SEGMENT_CHUNKS, + MISSING_ARROW, + NO_MATCHING_NODES, + CHAN_INDEX_ON_WILDCARD_NODE, + CHAN_INDEX_NOT_FOUND, + CHANQUERY_TYPEERROR, + NODE_ID_MULTIMATCH, + NODE_ID_CHAN_OVERMATCH, + CHAN_ID_MULTIMATCH, + CHAN_ID_NODE_OVERMATCH, + NODE_ID_NO_UNUSED, + DOUBLE_MIDDLE_OP, + MISSING_MIDDLE_OP, + MISSING_AMOUNT_OP, + MISSING_AMOUNT_OR_WILD_OP, + ZERO_AMOUNTS, + MISSING_PERCENT, + LEASE_AMOUNT_ZERO, +}; + +struct splice_script_error { + enum splice_script_error_type type; + size_t script_index; /* where in `script` was error found */ + char *message; + const char *phase; +}; + +struct splice_script_chan { + struct node_id node_id; + struct channel_id chan_id; +}; + +/* Outputs a multiline helpful compiler error for the user. */ +char *splice_script_compiler_error(const tal_t *ctx, + const char *script, + struct splice_script_error *error); + +struct splice_script_error *parse_splice_script(const tal_t *ctx, + const char *script, + struct splice_script_chan *channels, + size_t channels_count, + struct json_stream **result); + +#endif diff --git a/common/test/Makefile b/common/test/Makefile index 69a87e0e16c9..3eaff8ab2ed1 100644 --- a/common/test/Makefile +++ b/common/test/Makefile @@ -105,6 +105,16 @@ common/test/run-version: \ wire/towire.o +common/test/run-splice_script: \ + common/amount.o \ + common/node_id.o \ + common/bech32.o \ + common/splice_script.o \ + common/type_to_string.o \ + wire/fromwire.o \ + wire/towire.o + + common/test/run-trace: \ common/amount.o \ common/memleak.o \ diff --git a/common/test/run-splice_script.c b/common/test/run-splice_script.c new file mode 100644 index 000000000000..c0eb79ca4767 --- /dev/null +++ b/common/test/run-splice_script.c @@ -0,0 +1,75 @@ +#include "config.h" +#include "../version.c" +#include +#include +#include +#include +#include + +int main(int argc, const char *argv[]) +{ + struct 
splice_script_chan channels[] = + { + { + .node_id = { .k = { 0x03, 0x99, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x00 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x00 } }, + }, + { + .node_id = { .k = { 0x03, 0x93, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x01 } }, + }, + { + .node_id = { .k = { 0x03, 0x93, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x02 } }, + }, + { + .node_id = { .k = { 0x03, 0x93, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x03 } }, + }, + { + .node_id = { .k = { 0x03, 0x93, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 
0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x04 } }, + }, + { + .node_id = { .k = { 0x03, 0x93, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x05 } }, + }, + { + .node_id = { .k = { 0x03, 0x94, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf5, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x06 } }, + }, + { + .node_id = { .k = { 0x03, 0x94, 0x06, 0x9f, 0x16, 0x93, 0xfd, 0x89, 0xa4, 0x53, 0xf0, 0xca, 0xf0, 0x3e, 0xe3, 0x6b, 0x6f, 0x6c, 0x8a, 0xba, 0xa7, 0xef, 0x77, 0x8d, 0x3e, 0x2b, 0xcc, 0x7c, 0x2b, 0x44, 0x12, 0x01, 0x01 } }, + .chan_id = { .id = { 0xf4, 0x69, 0x9c, 0x3d, 0x53, 0x02, 0xe4, 0x48, 0x6c, 0x83, 0xef, 0x9d, 0x0f, 0x2d, 0x12, 0xa9, 0x69, 0xab, 0x41, 0xcc, 0xc9, 0x30, 0x1b, 0xd0, 0x42, 0xc5, 0x07, 0x60, 0xa8, 0x7b, 0x27, 0x00 } }, + }, + }; + + const char *script = "" + "0->0399:0->3M;" + "3M->bcrt1pp5ygqjg0q3mmv8ng8ceu59kl5a3etlf2vvryvnnyumvdyr8a77tqx507vk;" + "wallet->1M;" + "0->f4699c->3M;" + "0->0393069f1693fd89a453f0caf03ee36b6f6c8abaa7ef778d3e2bcc7c2b44120101:0->*\n" + "0->0393069f1693fd89a453f0caf03ee36b6f6c8abaa7ef778d3e2bcc7c2b44120101:?->*\n" + "0->03930:*->*\n" 
+ "3M|2M@12.34%->nodeid->0\n" + "*->f5699c3d5302e4486c83ef9d0f2d12a969ab41ccc9301bd042c50760a87b27a2;" + "|4.91M@2%->f5699c3d5302e4486c83ef9d0f2d12a969ab41ccc9301bd042c50760a87b27a3;" + "4M|100K->f5699c3d5302e4486c83ef9d0f2d12a969ab41ccc9301bd042c50760a87b27a4;"; + struct splice_script_error *error; + struct json_stream *result; + + printf("Parsing script:\n%s\n", script); + + chainparams = chainparams_for_network("regtest"); + + error = parse_splice_script(tmpctx, script, channels, + sizeof(channels) / sizeof(channels[0]), + &result); + + if (error) + printf("%s\n", splice_script_compiler_error(tmpctx, script, + error)); + + return 0; +} diff --git a/plugins/Makefile b/plugins/Makefile index 0e831f13356d..0eac1b306bdf 100644 --- a/plugins/Makefile +++ b/plugins/Makefile @@ -48,13 +48,15 @@ PLUGIN_SPENDER_SRC := \ plugins/spender/main.c \ plugins/spender/multifundchannel.c \ plugins/spender/multiwithdraw.c \ - plugins/spender/openchannel.c + plugins/spender/openchannel.c \ + plugins/spender/splice.c PLUGIN_SPENDER_HEADER := \ plugins/spender/multifundchannel.h \ plugins/spender/multiwithdraw.h \ plugins/spender/fundchannel.h \ plugins/spender/multifundchannel.h \ - plugins/spender/openchannel.h + plugins/spender/openchannel.h \ + plugins/spender/splice.h PLUGIN_SPENDER_OBJS := $(PLUGIN_SPENDER_SRC:.c=.o) PLUGIN_FUNDER_SRC := \ @@ -165,6 +167,9 @@ PLUGIN_COMMON_OBJS := \ common/random_select.o \ common/scb_wiregen.o \ common/setup.o \ + common/node_id.o \ + common/bech32.o \ + common/splice_script.o \ common/status_levels.o \ common/type_to_string.o \ common/utils.o \