Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 45 additions & 27 deletions manifests/integrations/kafka.pp
Original file line number Diff line number Diff line change
@@ -1,47 +1,65 @@
# Class: datadog_agent::integrations::kafka
#
# This class will install the necessary configuration for the kafka
# integration
# This class will install the necessary configuration for the kafka integration
#
# Parameters:
# $host:
# The hostname kafka is running on
# $port:
# The port to connect on
# $user
# The user for the datadog user
# The host kafka is running on. Defaults to 'localhost'
# $username
# Optionally specify username for connection
# $password
# The password for the datadog user
# Optionally specify password for connection
# $port
# The port kafka is running on. Defaults to 9999
# $name
# Name given to kafka instance
# $process_name_regex
#   Instead of specifying a host and port, the agent can connect using the attach API.
# $tools_jar_path
# Path to tools jar needs to be set when process_name_regex is set
# $java_bin_path
#   Path to the java binary; should be set if the agent can't find your java executable
# $trust_store_path
# Path to the trust store, should be set if ssl is enabled
# $trust_store_password
# Password to the trust store
# $tags
# Optional array of tags
#
#
# Sample Usage:
#
# class { 'datadog_agent::integrations::kafka' :
# host => 'localhost',
# tags => {
# environment => "production",
# },
# servers => [
# {
# 'host' => 'localhost',
# 'username' => 'kafka_username',
# 'password' => 'kafka_password',
# 'port' => '9999',
# 'name' => 'kafka_instance',
# 'process_name_regex' => '.*process_name.*',
# 'tools_jar_path' => '/usr/lib/jvm/java-7-openjdk-amd64/lib/tools.jar',
# 'java_bin_path' => '/path/to/java',
# 'trust_store_path' => '/path/to/trustStore.jks',
# 'trust_store_password' => 'password',
# 'tags' => ['env: test', 'sometag: someinfo'],
# },
# {
# 'host' => 'localhost',
# 'port' => '9999',
# 'tags' => [],
# },
# ]
# }
#
#
class datadog_agent::integrations::kafka(
$host = 'localhost',
$port = 9999,
$user = undef,
$password = undef,
$tags = { kafka => broker },
$servers = [{'host' => 'localhost', 'port' => '9999'}]
) inherits datadog_agent::params {
require ::datadog_agent
validate_hash($tags)
include datadog_agent

if !$::datadog_agent::agent5_enable {
$dst = "${datadog_agent::conf6_dir}/kafka.yaml"
} else {
$dst = "${datadog_agent::conf_dir}/kafka.yaml"
}
validate_array($servers)

file { $dst:
file { "${datadog_agent::params::conf_dir}/kafka.yaml":
ensure => file,
owner => $datadog_agent::params::dd_user,
group => $datadog_agent::params::dd_group,
Expand All @@ -50,4 +68,4 @@
require => Package[$datadog_agent::params::package_name],
notify => Service[$datadog_agent::params::service_name],
}
}
}
131 changes: 131 additions & 0 deletions spec/classes/datadog_agent_integrations_kafka_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
# Unit tests for the datadog_agent::integrations::kafka class.
#
# Verifies that the generated kafka.yaml has the correct ownership,
# permissions, and package/service wiring, and that the `servers`
# parameter is rendered correctly for default, single-broker,
# multi-broker, and fully-optioned configurations.
require 'spec_helper'

describe 'datadog_agent::integrations::kafka' do
  let(:facts) {{
    operatingsystem: 'Ubuntu',
  }}
  # Agent 5 layout identifiers used throughout the examples below.
  let(:conf_dir) { '/etc/dd-agent/conf.d' }
  let(:dd_user) { 'dd-agent' }
  let(:dd_group) { 'root' }
  let(:dd_package) { 'datadog-agent' }
  let(:dd_service) { 'datadog-agent' }
  let(:conf_file) { "#{conf_dir}/kafka.yaml" }

  it { should compile.with_all_deps }
  # The config file must be readable only by the agent user and be
  # refreshed through the package -> file -> service chain.
  it { should contain_file(conf_file).with(
    owner: dd_user,
    group: dd_group,
    mode: '0600',
  )}
  it { should contain_file(conf_file).that_requires("Package[#{dd_package}]") }
  it { should contain_file(conf_file).that_notifies("Service[#{dd_service}]") }

  context 'with default parameters' do
    # Default is a single localhost:9999 instance with no tags key at all.
    it { should contain_file(conf_file).with_content(%r{- host: localhost\s+port: 9999}) }
    it { should contain_file(conf_file).without_content(%r{tags:}) }
  end

  context 'with one kafka broker' do
    let(:params) {{
      servers: [
        {
          'host' => 'localhost',
          'port' => '9997',
          'tags' => %w{ kafka:broker tag1:value1 },
        }
      ]
    }}

    it { should contain_file(conf_file).with_content(%r{host: localhost\s+port: 9997\s+tags:\s+- kafka:broker\s+- tag1:value1}) }
  end

  context 'with two kafka brokers' do
    let(:params) {{
      servers: [
        {
          'host' => 'localhost',
          'port' => '9997',
          'tags' => %w{ kafka:broker tag1:value1 },
        },
        {
          'host' => 'remotehost',
          'port' => '9998',
          'tags' => %w{ kafka:remote tag2:value2 },
        }
      ]
    }}

    # Both instances must be rendered, each with its own tag list.
    it { should contain_file(conf_file).with_content(%r{host: localhost\s+port: 9997\s+tags:\s+- kafka:broker\s+- tag1:value1}) }
    it { should contain_file(conf_file).with_content(%r{host: remotehost\s+port: 9998\s+tags:\s+- kafka:remote\s+- tag2:value2}) }
  end

  context 'one kafka broker all options' do
    let(:params) {{
      servers: [
        {
          'host' => 'localhost',
          'port' => '9997',
          'tags' => %w{ kafka:broker tag1:value1 },
          'username' => 'username',
          'password' => 'password',
          'process_name_regex' => 'regex',
          'tools_jar_path' => 'tools.jar',
          'name' => 'kafka_instance',
          'java_bin_path' => '/path/to/java',
          'trust_store_path' => '/path/to/trustStore.jks',
          'trust_store_password' => 'password'
        }
      ]
    }}

    # Note: the template renders the 'username' parameter as the 'user' key.
    it { should contain_file(conf_file).with_content(%r{host: localhost\s+port: 9997\s+tags:\s+- kafka:broker\s+- tag1:value1\s+user: username\s+password: password\s+process_name_regex: regex\s+tools_jar_path: tools.jar\s+name: kafka_instance\s+java_bin_path: /path/to/java\s+trust_store_path: /path/to/trustStore.jks\s+trust_store_password: password}) }
  end

  # NOTE(review): a disabled =begin/=end block of examples copy-pasted from
  # the mongo spec (it asserted `server: mongodb://...` against kafka.yaml)
  # was removed here — it could never apply to this integration.
  # TODO: add real coverage for a server entry *without* a 'tags' key
  # (should render no tags: line for that instance) and for a non-array
  # 'tags' value (expected to fail validation).
end
93 changes: 51 additions & 42 deletions templates/agent-conf.d/kafka.yaml.erb
Original file line number Diff line number Diff line change
@@ -1,51 +1,57 @@
### MANAGED BY PUPPET
#
# MANAGED BY PUPPET
#

##########
# WARNING
##########
# This sample works only for Kafka >= 0.8.2.
# If you are running a version older than that, you can refer to agent 5.2.x released
# sample files, https://raw.githubusercontent.com/DataDog/dd-agent/5.2.1/conf.d/kafka.yaml.example

instances:
- host: <%= @host %>
port: <%= @port %> # This is the JMX port on which Kafka exposes its metrics (usually 9999)
<% if @tags.length > 0 -%>
<% @servers.each do |server| -%>
- host: <%= server['host'] %>
port: <%= server['port'] %>
<%- if !server['tags'].nil? && server['tags'].any? -%>
tags:
<%- @tags.each do |tag_name, tag_value| -%>
<%= tag_name %>: <%= tag_value %>
<% end -%>
<% end -%>
<% if @user -%>
user: <%= @user %>
password: <%= @password %>
<%- server['tags'].each do |tag| -%>
- <%= tag %>
<%- end -%>
<%- end -%>
<%- if !server['username'].nil? -%>
user: <%= server['username'] %>
<%- end -%>
<%- if !server['password'].nil? -%>
password: <%= server['password'] %>
<%- end -%>
<%- if !server['process_name_regex'].nil? -%>
process_name_regex: <%= server['process_name_regex'] %>
<%- end -%>
<%- if !server['tools_jar_path'].nil? -%>
tools_jar_path: <%= server['tools_jar_path'] %>
<%- end -%>
<%- if !server['name'].nil? -%>
name: <%= server['name'] %>
<%- end -%>
<%- if !server['java_bin_path'].nil? -%>
java_bin_path: <%= server['java_bin_path'] %>
<%- end -%>
<%- if !server['trust_store_path'].nil? -%>
trust_store_path: <%= server['trust_store_path'] %>
<%- end -%>
<%- if !server['trust_store_password'].nil? -%>
trust_store_password: <%= server['trust_store_password'] %>
<%- end -%>
<% end -%>
# process_name_regex: .*process_name.* # Instead of specifying a host and port, the agent can connect using the attach API.
# # This requires the JDK to be installed and the path to tools.jar to be set below.
# tools_jar_path: /usr/lib/jvm/java-7-openjdk-amd64/lib/tools.jar # To be set when process_name_regex is set
# name: kafka_instance
# java_bin_path: /path/to/java # Optional, should be set if the agent cannot find your java executable
# trust_store_path: /path/to/trustStore.jks # Optional, should be set if ssl is enabled
# trust_store_password: password
# - host: remotehost
# port: 9998 # Producer
# tags:
# kafka: producer0
# env: stage
# newTag: test
# - host: remotehost
# port: 9997 # Consumer
# tags:
# kafka: consumer0
# env: stage
# newTag: test

init_config:
is_jmx: true

# Metrics collected by this check. You should not have to modify this.
conf:
# v0.8.2.x Producers
#
# Producers (only v0.8.2.x)
#
- include:
domain: 'kafka.producer'
bean_regex: 'kafka\.producer:type=ProducerRequestMetrics,name=ProducerRequestRateAndTimeMs,clientId=.*'
Expand Down Expand Up @@ -75,8 +81,9 @@ init_config:
metric_type: rate
alias: kafka.producer.message_rate


# v0.9.0.x Producers
#
# Producers (v0.9.0.x to v0.10.2.x)
#
- include:
domain: 'kafka.producer'
bean_regex: 'kafka\.producer:type=producer-metrics,client-id=.*'
Expand Down Expand Up @@ -113,8 +120,9 @@ init_config:
metric_type: gauge
alias: kafka.producer.io_wait


# v0.8.2.x Consumers
#
# Consumers (only v0.8.2.x)
#
- include:
domain: 'kafka.consumer'
bean_regex: 'kafka\.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=.*'
Expand Down Expand Up @@ -143,25 +151,26 @@ init_config:
Count:
metric_type: rate
alias: kafka.consumer.messages_in

# Offsets committed to ZooKeeper
- include:
# Offsets committed to ZooKeeper
domain: 'kafka.consumer'
bean_regex: 'kafka\.consumer:type=ZookeeperConsumerConnector,name=ZooKeeperCommitsPerSec,clientId=.*'
attribute:
Count:
metric_type: rate
alias: kafka.consumer.zookeeper_commits
# Offsets committed to Kafka
- include:
# Offsets committed to Kafka
domain: 'kafka.consumer'
bean_regex: 'kafka\.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=.*'
attribute:
Count:
metric_type: rate
alias: kafka.consumer.kafka_commits

# v0.9.0.x Consumers
#
# Consumers (v0.9.0.x to v0.10.2.x)
#
- include:
domain: 'kafka.consumer'
bean_regex: 'kafka\.consumer:type=consumer-fetch-manager-metrics,client-id=.*'
Expand Down Expand Up @@ -311,8 +320,8 @@ init_config:
domain: 'kafka.server'
bean: 'kafka.server:type=KafkaRequestHandlerPool,name=RequestHandlerAvgIdlePercent'
attribute:
Count:
metric_type: rate
OneMinuteRate:
metric_type: gauge
alias: kafka.request.handler.avg.idle.pct.rate
- include:
domain: 'kafka.server'
Expand Down