X-Git-Url: https://git.octo.it/?a=blobdiff_plain;f=src%2Fwrite_kafka.c;h=6018fea05502f511a1ed8794f9f11982810e1a82;hb=41288c6a9ed050b41ad47184aa1b53668c3588cc;hp=9fda2dfc13a416989afeb1de71b2ad6dcb84a99d;hpb=88001af68cb86d0d95fa143b031359b10eddbe1f;p=collectd.git

diff --git a/src/write_kafka.c b/src/write_kafka.c
index 9fda2dfc..6018fea0 100644
--- a/src/write_kafka.c
+++ b/src/write_kafka.c
@@ -60,7 +60,14 @@ static int kafka_write(const data_set_t *, const value_list_t *, user_data_t *);
 static int32_t kafka_partition(const rd_kafka_topic_t *, const void *, size_t,
                                int32_t, void *, void *);
 
-#if defined HAVE_LIBRDKAFKA_LOGGER || defined HAVE_LIBRDKAFKA_LOG_CB
+/* Version 0.9.0 of librdkafka deprecates rd_kafka_set_logger() in favor of
+ * rd_kafka_conf_set_log_cb(). This is to make sure we're not using the
+ * deprecated function. */
+#ifdef HAVE_LIBRDKAFKA_LOG_CB
+# undef HAVE_LIBRDKAFKA_LOGGER
+#endif
+
+#if defined(HAVE_LIBRDKAFKA_LOGGER) || defined(HAVE_LIBRDKAFKA_LOG_CB)
 static void kafka_log(const rd_kafka_t *, int, const char *, const char *);
 
 static void kafka_log(const rd_kafka_t *rkt, int level,
@@ -78,6 +85,15 @@ static uint32_t kafka_hash(const char *keydata, size_t keylen)
     return hash;
 }
 
+/* 31 bit -> 4 byte -> 8 byte hex string + null byte */
+#define KAFKA_RANDOM_KEY_SIZE 9
+#define KAFKA_RANDOM_KEY_BUFFER (char[KAFKA_RANDOM_KEY_SIZE]) {""}
+static char *kafka_random_key(char buffer[static KAFKA_RANDOM_KEY_SIZE])
+{
+    ssnprintf(buffer, KAFKA_RANDOM_KEY_SIZE, "%08lX", (unsigned long) mrand48());
+    return buffer;
+}
+
 static int32_t kafka_partition(const rd_kafka_topic_t *rkt,
                                const void *keydata, size_t keylen,
                                int32_t partition_cnt, void *p, void *m)
@@ -172,9 +188,9 @@ static int kafka_write(const data_set_t *ds, /* {{{ */
 
     switch (ctx->format) {
     case KAFKA_FORMAT_COMMAND:
-        status = create_putval(buffer, sizeof(buffer), ds, vl);
+        status = cmd_create_putval(buffer, sizeof(buffer), ds, vl);
         if (status != 0) {
-            ERROR("write_kafka plugin: create_putval failed with status %i.",
+            ERROR("write_kafka plugin: cmd_create_putval failed with status %i.",
                   status);
             return status;
         }
@@ -203,11 +219,10 @@ static int kafka_write(const data_set_t *ds, /* {{{ */
         return -1;
     }
 
-    key = ctx->key;
-    if (key != NULL)
-        keylen = strlen (key);
-    else
-        keylen = 0;
+    key = (ctx->key != NULL)
+        ? ctx->key
+        : kafka_random_key(KAFKA_RANDOM_KEY_BUFFER);
+    keylen = strlen (key);
 
     rd_kafka_produce(ctx->topic, RD_KAFKA_PARTITION_UA,
                      RD_KAFKA_MSG_F_COPY, buffer, blen,
@@ -319,8 +334,12 @@ static void kafka_config_topic(rd_kafka_conf_t *conf, oconfig_item_t *ci) /* {{{
        }
    } else if (strcasecmp ("Key", child->key) == 0) {
-        cf_util_get_string (child, &tctx->key);
-        assert (tctx->key != NULL);
+        if (cf_util_get_string (child, &tctx->key) != 0)
+            continue;
+        if (strcasecmp ("Random", tctx->key) == 0) {
+            sfree(tctx->key);
+            tctx->key = strdup (kafka_random_key (KAFKA_RANDOM_KEY_BUFFER));
+        }
    } else if (strcasecmp ("Format", child->key) == 0) {
        status = cf_util_get_string(child, &key);
        if (status != 0)
@@ -383,12 +402,11 @@ static void kafka_config_topic(rd_kafka_conf_t *conf, oconfig_item_t *ci) /* {{{
     ssnprintf(callback_name, sizeof(callback_name), "write_kafka/%s",
               tctx->topic_name);
 
-    user_data_t ud = {
-        .data = tctx,
-        .free_func = kafka_topic_context_free
-    };
-
-    status = plugin_register_write (callback_name, kafka_write, &ud);
+    status = plugin_register_write (callback_name, kafka_write,
+                                    &(user_data_t) {
+                                        .data = tctx,
+                                        .free_func = kafka_topic_context_free,
+                                    });
     if (status != 0) {
         WARNING ("write_kafka plugin: plugin_register_write (\"%s\") "
                  "failed with status %i.",
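
Note on the KAFKA_RANDOM_KEY_BUFFER idiom introduced above: the macro expands to an anonymous compound literal, (char[KAFKA_RANDOM_KEY_SIZE]){""}, which hands kafka_random_key() a caller-provided scratch buffer without any static state or heap allocation. The literal stays valid until the end of the enclosing block, which is why kafka_write() can pass the result straight to rd_kafka_produce() later in the same function, while kafka_config_topic() strdup()s it before storing it in tctx->key (there the key has to outlive the function). Below is a minimal standalone sketch of the same technique; the names (RANDOM_KEY_SIZE, RANDOM_KEY_BUFFER, random_key) are illustrative only, and plain snprintf()/rand() stand in for collectd's ssnprintf() and mrand48() so the sketch compiles on its own:

#include <stdio.h>
#include <stdlib.h>

/* 8 hex digits + terminating null byte. */
#define RANDOM_KEY_SIZE 9
/* Anonymous compound literal: a fresh, writable char[RANDOM_KEY_SIZE]
 * created at the call site; no global or static buffer is needed. */
#define RANDOM_KEY_BUFFER (char[RANDOM_KEY_SIZE]) {""}

/* Formats a random value as zero-padded hex into the caller-supplied
 * buffer and returns that same buffer, so the call can be used as an
 * expression wherever a string is expected. */
static char *random_key(char buffer[static RANDOM_KEY_SIZE])
{
    snprintf(buffer, RANDOM_KEY_SIZE, "%08lX", (unsigned long) rand());
    return buffer;
}

int main(void)
{
    /* The compound literal lives until the end of this block, so using
     * the returned pointer within the same block is well defined. */
    printf("key = %s\n", random_key(RANDOM_KEY_BUFFER));
    return 0;
}

The same lifetime rule explains the "Key Random" branch in the config handler: the random key generated at configuration time is copied with strdup() because the temporary buffer disappears once kafka_config_topic() returns, whereas leaving ctx->key unset makes kafka_write() generate a fresh random key for every dispatched value list.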