## ELLKTEST
## demonstrate and test ELLK's functionality
## https://xkcd.com/378/
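# baseline utility packages installed on every tested platform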
packages = %w( tar nano htop )
## logstash and elasticsearch require java, let us install it then
case node[:platform_family]
when 'debian'
  include_recipe 'apt'
  packages << 'openjdk-7-jre-headless'
when 'rhel'
  include_recipe 'yum'
  packages << if node[:platform_version].to_i == 5
                'java-1.7.0-openjdk'
              else
                'java-1.8.0-openjdk-headless'
              end
end
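# install the baseline packages plus whichever java package was selected above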
packages.each do |pkg|
  package pkg
end
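# bring in runit for service supervision and make its sv binary available at /usr/local/bin as well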
include_recipe 'runit'
link '/usr/local/bin/sv' do
  to '/usr/bin/sv'
end
# build up an array of logs to ship out
logs = []
# all of our tested linux flavors come with syslog, let's shove that in
logs << {
  'paths' => ['/var/log/messages', '/var/log/*log'],
  'fields' => {
    'type' => 'syslog',
    'chef_node' => node.name,
    'chef_env' => node.chef_environment
  }
}
# we stored an unencrypted certificate for localhost
# fetch it and store it on the system
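# (the 'secrets'/'logstash' data bag item is assumed to exist already, e.g. uploaded
# with something like `knife data bag from file secrets logstash.json` or shipped as a test fixture)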
secrets = Chef::DataBagItem.load('secrets', 'logstash')
logstash_key = Base64.decode64(secrets['key'])
file '/tmp/logstash.key' do
  content logstash_key
  sensitive true
end
logstash_crt = Base64.decode64(secrets['certificate'])
file '/tmp/logstash.crt' do
  content logstash_crt
  sensitive true
end
# install ELASTICSEARCH 1.7.1 instead of the default of 1.7.0
# and configure it to use /tmp for data storage
elasticsearch 'default' do
  datadir '/tmp/es_datadir'
  version '1.7.1'
  checksum '86a0c20eea6ef55b14345bff5adf896e6332437b19180c4582a346394abde019'
  url 'https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.7.1.tar.gz'
end
# install elasticsearch_plugin (license)
# elasticsearch_plugin 'license' do
#   version '1.7.1'
# end
# We'll go ahead and ship elasticsearch logs, too
logs << {
  'paths' => ['/var/log/elasticsearch/current'],
  'fields' => {
    'type' => 'elasticsearch',
    'chef_node' => node.name,
    'chef_env' => node.chef_environment
  }
}
## install LOGSTASH, source this cookbook's ellktest templates instead of the defaults,
## and demonstrate merging in execution vars
bonus_env = { 'HELLO' => 'WORLD', 'LS_USER' => 'kibana' } # for testing's sake
bonus_conf = { 'test_value' => 'merged hash from a recipe!' }
logstash 'default' do
  crt_location '/tmp/logstash.crt'
  key_location '/tmp/logstash.key'
  source 'ellktest'
  runit_env bonus_env
  conf_options bonus_conf
end
## install KIBANA and configure it for port 8080; maybe we'll proxy to it from NGINX with some auth_basic?
kibana 'default' do
  port 8080
end
## We may want to visualize Kibana's logs and pretend it is in a prod environment
logs << {
  'paths' => ['/var/log/kibana/current'],
  'fields' => {
    'type' => 'kibana',
    'chef_node' => node.name,
    'chef_env' => 'prod'
  }
}
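# make sure every log file we plan to ship exists and is readable by the logstash group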
logs.each do |conf|
  conf['paths'].each do |log|
    file log do
      group 'logstash'
      ignore_failure true
    end
  end
end
## Install the forwarder and configure it to ship everything in logs up to this point.
logstash_forwarder 'default' do
  crt_location '/tmp/logstash.crt'
  logstash_servers ['localhost:5043']
  files logs
end