-
Notifications
You must be signed in to change notification settings - Fork 2.8k
feat(pubsub): support kafka tls and sasl/plain auth #7046
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 12 commits
d431550
366ce56
4339cb2
e89d31c
5a513d0
61aefd0
131c130
3071725
f1d9283
e31f186
185fbb4
a1510bf
45d4f33
a68e2e6
6ee9a2b
a0753a8
d0d5566
d56772c
99bced1
6f87da0
9f497b8
30b00a7
643414d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,63 @@ | ||
| -- | ||
| -- Licensed to the Apache Software Foundation (ASF) under one or more | ||
| -- contributor license agreements. See the NOTICE file distributed with | ||
| -- this work for additional information regarding copyright ownership. | ||
| -- The ASF licenses this file to You under the Apache License, Version 2.0 | ||
| -- (the "License"); you may not use this file except in compliance with | ||
| -- the License. You may obtain a copy of the License at | ||
| -- | ||
| -- http://www.apache.org/licenses/LICENSE-2.0 | ||
| -- | ||
| -- Unless required by applicable law or agreed to in writing, software | ||
| -- distributed under the License is distributed on an "AS IS" BASIS, | ||
| -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| -- See the License for the specific language governing permissions and | ||
| -- limitations under the License. | ||
| -- | ||
local core = require("apisix.core")


-- JSON schema for the plugin configuration.
-- Only SASL/PLAIN credentials are configurable here; when the `sasl`
-- object is present, both `username` and `password` must be supplied
-- (empty strings are accepted as defaults).
local schema = {
    type = "object",
    properties = {
        sasl = {
            type = "object",
            properties = {
                username = {
                    type = "string",
                    default = "",
                },
                password = {
                    type = "string",
                    default = "",
                },
            },
            required = {"username", "password"},
        },
    },
}


local _M = {
    version = 0.1,
    priority = 508,
    name = "kafka-proxy",
    schema = schema,
}


-- Validate a plugin configuration against the schema above.
function _M.check_schema(conf)
    return core.schema.check(schema, conf)
end


-- Access phase: stash the SASL credentials on the request context so the
-- Kafka pubsub handler can pick them up when building the broker list.
function _M.access(conf, ctx)
    local sasl = conf.sasl
    if not sasl then
        return
    end

    ctx.kafka_consumer_enable_sasl = true
    ctx.kafka_consumer_sasl_username = sasl.username
    ctx.kafka_consumer_sasl_password = sasl.password
end


return _M
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,145 @@ | ||
| -- | ||
| -- Licensed to the Apache Software Foundation (ASF) under one or more | ||
| -- contributor license agreements. See the NOTICE file distributed with | ||
| -- this work for additional information regarding copyright ownership. | ||
| -- The ASF licenses this file to You under the Apache License, Version 2.0 | ||
| -- (the "License"); you may not use this file except in compliance with | ||
| -- the License. You may obtain a copy of the License at | ||
| -- | ||
| -- http://www.apache.org/licenses/LICENSE-2.0 | ||
| -- | ||
| -- Unless required by applicable law or agreed to in writing, software | ||
| -- distributed under the License is distributed on an "AS IS" BASIS, | ||
| -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| -- See the License for the specific language governing permissions and | ||
| -- limitations under the License. | ||
| -- | ||
|
|
||
local core = require("apisix.core")
local bconsumer = require("resty.kafka.basic-consumer")
local ffi = require("ffi")
local C = ffi.C
local tostring = tostring
local type = type
local ipairs = ipairs
local str_sub = string.sub


ffi.cdef[[
    int64_t atoll(const char *num);
]]


local _M = {}


--- Convert lua-protobuf's "#<digits>" int64 encoding to an int64_t cdata.
--
-- LuaJIT cannot represent 64-bit integers as native Lua numbers, so
-- pb decode emits large int64 values as strings prefixed with "#" to
-- avoid precision loss. Here the leading "#" is skipped (pointer + 1)
-- and the rest is parsed with C's atoll. Non-string inputs (already
-- plain numbers) are passed through unchanged.
local function pb_convert_to_int64(src)
    if type(src) ~= "string" then
        return src
    end

    -- +1 skips the leading '#' marker byte
    return C.atoll(ffi.cast("char *", src) + 1)
end
|
|
||
|
|
||
-- Takes over requests of type kafka upstream in the http_access phase.
function _M.access(api_ctx)
    local pubsub, err = core.pubsub.new()
    if not pubsub then
        core.log.error("failed to initialize pubsub module, err: ", err)
        core.response.exit(400)
        return
    end

    -- Build the kafka client broker list from the matched upstream nodes,
    -- attaching SASL/PLAIN credentials when the kafka-proxy plugin set
    -- them on the request context.
    local up_nodes = api_ctx.matched_upstream.nodes
    local broker_list = {}
    for i, node in ipairs(up_nodes) do
        local broker = {
            host = node.host,
            port = node.port,
        }

        if api_ctx.kafka_consumer_enable_sasl then
            broker.sasl_config = {
                mechanism = "PLAIN",
                user = api_ctx.kafka_consumer_sasl_username,
                password = api_ctx.kafka_consumer_sasl_password,
            }
        end

        broker_list[i] = broker
    end

    local client_config = {refresh_interval = 30 * 60 * 1000}
    local tls = api_ctx.matched_upstream.tls
    if tls then
        client_config.ssl = true
        client_config.ssl_verify = tls.verify
    end

    -- load and create the consumer instance when it is determined
    -- that the websocket connection was created successfully
    local consumer = bconsumer:new(broker_list, client_config)

    -- tostring() on an int64_t cdata yields e.g. "42LL"; drop the
    -- trailing two characters to get the plain decimal string.
    local function int64_to_string(value)
        local s = tostring(value)
        return str_sub(s, 1, #s - 2)
    end

    pubsub:on("cmd_kafka_list_offset", function (params)
        -- The timestamp parameter uses a 64-bit integer, which is difficult
        -- for luajit to handle well, so the int64_as_string option in
        -- lua-protobuf is used here. Smaller numbers will be decoded as
        -- lua number, while overly larger numbers will be decoded as strings
        -- in the format #number, where the # symbol at the beginning of the
        -- string will be removed and converted to int64_t with the atoll function.
        local timestamp = pb_convert_to_int64(params.timestamp)

        local offset, list_err = consumer:list_offset(params.topic,
                                                      params.partition, timestamp)
        if not offset then
            return nil, "failed to list offset, topic: " .. params.topic ..
                ", partition: " .. params.partition .. ", err: " .. list_err
        end

        return {
            kafka_list_offset_resp = {
                offset = int64_to_string(offset)
            }
        }
    end)

    pubsub:on("cmd_kafka_fetch", function (params)
        local offset = pb_convert_to_int64(params.offset)

        local ret, fetch_err = consumer:fetch(params.topic, params.partition, offset)
        if not ret then
            return nil, "failed to fetch message, topic: " .. params.topic ..
                ", partition: " .. params.partition .. ", err: " .. fetch_err
        end

        -- split into multiple messages when the amount of data in
        -- a single batch is too large
        local messages = ret.records

        -- special handling of int64 for luajit compatibility
        for _, message in ipairs(messages) do
            message.timestamp = int64_to_string(message.timestamp)
            message.offset = int64_to_string(message.offset)
        end

        return {
            kafka_fetch_resp = {
                messages = messages,
            },
        }
    end)

    -- start processing client commands
    pubsub:wait()
end


return _M
| Original file line number | Diff line number | Diff line change | ||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -435,7 +435,7 @@ local function check_upstream_conf(in_dp, conf) | |||||||||||||
| end | ||||||||||||||
| end | ||||||||||||||
|
|
||||||||||||||
| if conf.tls then | ||||||||||||||
| if conf.tls and conf.tls.client_cert and conf.tls.client_key then | ||||||||||||||
|
||||||||||||||
| if conf.tls and conf.tls.client_cert and conf.tls.client_key then | |
| if conf.tls and conf.tls.client_cert then |
is enough?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes, it's enough; we ensure that client_cert and client_key both exist via jsonschema's dependencies. Either one existing on its own is forbidden.
Lines 416 to 419 in 99bced1
| dependencies = { | |
| client_cert = {"client_key"}, | |
| client_key = {"client_cert"}, | |
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
removed
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Would it be more appropriate to put it in the `linux-ci-init-service.sh` script?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
First make sure that the certificate exists before docker-compose starts Kafka. If the certificate does not exist, the Kafka container will crash.