Commit f9686e6

Merge branch 'lazy-paths'

Closes #61.

mthssdrbrg committed Nov 19, 2014
2 parents 70c30e2 + 16f51bc commit f9686e6

Showing 8 changed files with 88 additions and 94 deletions.
83 changes: 1 addition & 82 deletions attributes/default.rb
@@ -37,11 +37,7 @@
#
# Directory where the downloaded archive will be extracted to, and possibly
# compiled in.
default.kafka.build_dir = ::File.join(node.kafka.install_dir, 'build')

#
# Directory where to keep Kafka configuration files.
default.kafka.config_dir = ::File.join(node.kafka.install_dir, 'config')
default.kafka.build_dir = ::File.join(Chef::Config[:file_cache_path], 'kafka-build')

#
# Directory where to store logs from Kafka.
@@ -71,10 +67,6 @@
# JVM Performance options for Kafka.
default.kafka.jvm_performance_opts = '-server -XX:+UseCompressedOops -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:+DisableExplicitGC -Djava.awt.headless=true'

#
# GC log options for Kafka.
default.kafka.gc_log_opts = %(-Xloggc:#{node.kafka.log_dir}/kafka-gc.log -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps)

#
# The type of "init" system to install scripts for. Valid values are currently
# :sysv, :systemd and :upstart.
@@ -108,76 +100,3 @@
# Initially set it to an empty Hash to avoid having `fetch(:broker, {})`
# statements in helper methods and the alike.
default.kafka.broker = {}

#
# Root logger configuration.
default.kafka.log4j.root_logger = 'INFO, kafkaAppender'

#
# Appender definitions for various classes.
default.kafka.log4j.appenders = {
  'kafkaAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'stateChangeAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-state-change.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'requestAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-request.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'controllerAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-controller.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
}

#
# Logger definitions.
default.kafka.log4j.loggers = {
  'org.IOItec.zkclient.ZkClient' => {
    level: 'INFO',
  },
  'kafka.network.RequestChannel$' => {
    level: 'WARN',
    appender: 'requestAppender',
    additivity: false,
  },
  'kafka.request.logger' => {
    level: 'WARN',
    appender: 'requestAppender',
    additivity: false,
  },
  'kafka.controller' => {
    level: 'INFO',
    appender: 'controllerAppender',
    additivity: false,
  },
  'state.change.logger' => {
    level: 'INFO',
    appender: 'stateChangeAppender',
    additivity: false,
  },
}
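A note on the first hunk above: the build directory now lives under Chef's file cache instead of under `install_dir`. A rough sketch of how the new path resolves, assuming a stock chef-client whose cache sits in the usual location (the concrete path is an assumption, not part of the commit):

    # Illustration only: Chef::Config[:file_cache_path] is the client's
    # scratch/cache directory, commonly /var/chef/cache on Linux.
    build_dir = ::File.join(Chef::Config[:file_cache_path], 'kafka-build')
    # => "/var/chef/cache/kafka-build" on a typical client run

This keeps downloaded archives and compiled artifacts in Chef's scratch area rather than leaving build output under the Kafka install directory.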
3 changes: 0 additions & 3 deletions recipes/_configure.rb
@@ -3,9 +3,6 @@
# Recipe:: _configure
#

include_recipe 'kafka::_defaults'


template ::File.join(node.kafka.config_dir, 'log4j.properties') do
source 'log4j.properties.erb'
owner node.kafka.user
73 changes: 73 additions & 0 deletions recipes/_defaults.rb
@@ -10,3 +10,76 @@
unless broker_attribute?(:port)
node.default.kafka.broker.port = 6667
end

node.default_unless.kafka.gc_log_opts = %W[
  -Xloggc:#{node.kafka.log_dir}/kafka-gc.log
  -verbose:gc
  -XX:+PrintGCDetails
  -XX:+PrintGCDateStamps
  -XX:+PrintGCTimeStamps
].join(' ')
node.default_unless.kafka.config_dir = ::File.join(node.kafka.install_dir, 'config')
node.default_unless.kafka.log4j.root_logger = 'INFO, kafkaAppender'
node.default_unless.kafka.log4j.appenders = {
  'kafkaAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'stateChangeAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-state-change.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'requestAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-request.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
  'controllerAppender' => {
    type: 'org.apache.log4j.DailyRollingFileAppender',
    date_pattern: '.yyyy-MM-dd',
    file: %(#{node.kafka.log_dir}/kafka-controller.log),
    layout: {
      type: 'org.apache.log4j.PatternLayout',
      conversion_pattern: '[%d] %p %m (%c)%n',
    },
  },
}
node.default_unless.kafka.log4j.loggers = {
  'org.IOItec.zkclient.ZkClient' => {
    level: 'INFO',
  },
  'kafka.network.RequestChannel$' => {
    level: 'WARN',
    appender: 'requestAppender',
    additivity: false,
  },
  'kafka.request.logger' => {
    level: 'WARN',
    appender: 'requestAppender',
    additivity: false,
  },
  'kafka.controller' => {
    level: 'INFO',
    appender: 'controllerAppender',
    additivity: false,
  },
  'state.change.logger' => {
    level: 'INFO',
    appender: 'stateChangeAppender',
    additivity: false,
  },
}
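Because these defaults are set with `default_unless` inside a recipe, they are evaluated at converge time, after every attribute file, role and environment has been merged. A minimal sketch of the intended effect, assuming a hypothetical wrapper cookbook (the cookbook name and paths below are made up for illustration):

    # my-kafka/attributes/default.rb (hypothetical wrapper cookbook)
    default[:kafka][:install_dir] = '/srv/kafka'

    # my-kafka/recipes/default.rb
    include_recipe 'kafka::default'

    # During the converge, kafka::_defaults evaluates
    #   node.default_unless.kafka.config_dir = ::File.join(node.kafka.install_dir, 'config')
    # with the override already merged, so config_dir resolves to
    # '/srv/kafka/config' unless the wrapper set it explicitly.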
1 change: 1 addition & 0 deletions recipes/default.rb
@@ -5,6 +5,7 @@

case kafka_install_method
when :source, :binary
include_recipe 'kafka::_defaults'
include_recipe 'kafka::_setup'
include_recipe 'kafka::%s' % node.kafka.install_method
include_recipe 'kafka::_configure'
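With `kafka::_defaults` included first, the interpolated include in this hunk picks the install-method recipe, so a binary install converges `_defaults`, `_setup`, `binary`, `_configure` in that order. The format string resolves like this:

    'kafka::%s' % :binary # => "kafka::binary"
    'kafka::%s' % :source # => "kafka::source"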
2 changes: 1 addition & 1 deletion spec/recipes/binary_spec.rb
@@ -23,7 +23,7 @@

it 'extracts downloaded Kafka archive' do
expect(chef_run).to run_execute('extract-kafka').with({
cwd: '/opt/kafka/build',
cwd: %(#{Chef::Config[:file_cache_path]}/kafka-build),
user: 'root',
group: 'root'
})
10 changes: 7 additions & 3 deletions spec/recipes/configure_spec.rb
@@ -7,7 +7,11 @@
ChefSpec::Runner.new do |node|
node.set[:kafka] = kafka_attributes
node.set[:kafka][:broker] = broker_attributes
end.converge(described_recipe)
end.converge(*described_recipes)
end

let :described_recipes do
['kafka::_defaults', described_recipe]
end

let :node do
@@ -48,7 +52,7 @@
let :chef_run do
ChefSpec::Runner.new do |node|
node.automatic[:ipaddress] = '255.255.255.255'
end.converge(described_recipe)
end.converge(*described_recipes)
end

it 'mod\'s it by 2**31' do
@@ -179,7 +183,7 @@
node.set[:kafka][:scala_version] = '2.8.0'
node.set[:kafka][:init_style] = init_style
node.set[:kafka][:broker] = broker_attributes
end.converge(described_recipe)
end.converge(*described_recipes)
end

let :platform_and_version do
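The spec changes in this file swap the single `described_recipe` converge for a `described_recipes` list so that `kafka::_defaults` runs before `kafka::_configure`; without it, the lazily set attributes (`config_dir`, the log4j defaults) would be missing when the templates render. A condensed sketch of the pattern, reusing the names from the diff:

    runner = ChefSpec::Runner.new
    described_recipes = ['kafka::_defaults', 'kafka::_configure']
    runner.converge(*described_recipes) # same as runner.converge('kafka::_defaults', 'kafka::_configure')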
4 changes: 2 additions & 2 deletions spec/recipes/setup_spec.rb
@@ -6,7 +6,7 @@
let :chef_run do
ChefSpec::Runner.new do |node|
node.set[:kafka] = kafka_attrs
end.converge(described_recipe)
end.converge('kafka::_defaults', described_recipe)
end

let :kafka_attrs do
@@ -54,7 +54,7 @@
end

it 'creates build directory' do
expect(chef_run).to create_directory('/opt/kafka/build').with({
expect(chef_run).to create_directory(%(#{Chef::Config[:file_cache_path]}/kafka-build)).with({
owner: 'kafka',
group: 'kafka',
mode: '755'
6 changes: 3 additions & 3 deletions spec/recipes/source_spec.rb
@@ -43,7 +43,7 @@
end

it 'runs execute block' do
expect(chef_run).to run_execute('compile-kafka').with_cwd('/opt/kafka/build')
expect(chef_run).to run_execute('compile-kafka').with_cwd(%(#{Chef::Config[:file_cache_path]}/kafka-build))
end

it 'uses sbt' do
@@ -62,7 +62,7 @@
end

it 'runs execute block' do
expect(chef_run).to run_execute('compile-kafka').with_cwd('/opt/kafka/build')
expect(chef_run).to run_execute('compile-kafka').with_cwd(%(#{Chef::Config[:file_cache_path]}/kafka-build))
end

it 'uses gradle' do
@@ -76,7 +76,7 @@
end

it 'runs execute block' do
expect(chef_run).to run_execute('compile-kafka').with_cwd('/opt/kafka/build')
expect(chef_run).to run_execute('compile-kafka').with_cwd(%(#{Chef::Config[:file_cache_path]}/kafka-build))
end

it 'uses gradle' do
