Skip to content

Commit

Permalink
Allow disabling default elasticsearch output for parser job
Browse files Browse the repository at this point in the history
Fix for cloudfoundry-community#8.
Property 'logstash_parser.outputs' is set to a list of Logstash output plugins; by default it contains only the 'elasticsearch' plugin.
Rename 'logstash_parser' elasticsearch properties to follow the same naming convention - 'logstash_parser.elasticsearch.*'.
Add logstash-templates tests to test input_and_output.conf.erb template with different configurations.
Minor rework in test scripts - all tests are run with the default rake task.
Use FileUtils.* methods in Rakefile 'cleanup' tasks.
  • Loading branch information
hannayurkevich committed Nov 28, 2016
1 parent e77c9d5 commit 5e1fbea
Show file tree
Hide file tree
Showing 23 changed files with 399 additions and 63 deletions.
36 changes: 22 additions & 14 deletions jobs/parser/spec
Original file line number Diff line number Diff line change
Expand Up @@ -44,23 +44,38 @@ properties:
password: c1oudbunny
default: [ { plugin: "redis", options : {} } ]
logstash_parser.outputs:
description: "The configuration to embed into the logstash outputs section"
description: |
A list of output plugins, with a hash of options for each of them. Please refer to example below.
example:
inputs:
- plugin: mongodb
options:
uri: 192.168.1.1
database: logsearch
collection: logs
default: [ { plugin: "elasticsearch", options: {} } ]
logstash_parser.workers:
description: "The number of worker threads that logstash should use (default: auto = one per CPU)"
default: auto
logstash_parser.idle_flush_time:
description: "How frequently to flush events if the output queue is not full."
logstash_parser.elasticsearch_document_id:
logstash_parser.elasticsearch.idle_flush_time:
description: "How frequently to flush events if the output queue is not full."
logstash_parser.elasticsearch.document_id:
description: "Use a specific, dynamic ID rather than an auto-generated identifier."
default: ~
logstash_parser.elasticsearch_index:
logstash_parser.elasticsearch.index:
description: "The specific, dynamic index name to write events to."
default: "logstash-%{+YYYY.MM.dd}"
logstash_parser.elasticsearch_index_type:
logstash_parser.elasticsearch.index_type:
description: "The specific, dynamic index type name to write events to."
default: "%{@type}"
logstash_parser.elasticsearch_routing:
logstash_parser.elasticsearch.routing:
description: "The routing to be used when indexing a document."
logstash.elasticsearch.data_hosts:
description: The list of elasticsearch data node IPs
default: [127.0.0.1]
logstash.elasticsearch.flush_size:
description: Controls how many logs will be buffered and sent to Elasticsearch for bulk indexing
default: 500
logstash_parser.timecop.reject_greater_than_hours:
description: "Logs with timestamps greater than this many hours in the future won't be parsed and will get tagged with fail/timecop"
default: 1
Expand All @@ -74,13 +89,6 @@ properties:
description: "A list of index templates that need to be present in ElasticSearch before the process starts"
default: ["index_template"]

logstash.output.elasticsearch.data_hosts:
description: The list of elasticsearch data node IPs
default: [127.0.0.1]
logstash.output.elasticsearch.flush_size:
description: Controls how many logs will be buffered and sent to Elasticsearch for bulk indexing
default: 500

redis.host:
description: Redis host of queue
redis.port:
Expand Down
2 changes: 1 addition & 1 deletion jobs/parser/templates/bin/parser_ctl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ source /var/vcap/jobs/parser/helpers/ctl_setup.sh 'parser'

function wait_for_template {
local template_name="$1"
local MASTER_URL="<%= p("logstash.output.elasticsearch.data_hosts").first %>:9200"
local MASTER_URL="<%= p("logstash_parser.elasticsearch.data_hosts").first %>:9200"

set +e
while true; do
Expand Down
41 changes: 25 additions & 16 deletions jobs/parser/templates/config/input_and_output.conf.erb
Original file line number Diff line number Diff line change
Expand Up @@ -27,22 +27,31 @@ output {
}
<% end %>

elasticsearch {
hosts => [<%= p("logstash.output.elasticsearch.data_hosts").map { |ip| "\"#{ip}:9200\"" }.join(',') %>]
flush_size => <%= p("logstash.output.elasticsearch.flush_size") %>
<% if p('logstash_parser.idle_flush_time', nil) %>
idle_flush_time => <%= p('logstash_parser.idle_flush_time') %>
<% end %>
<% if p('logstash_parser.elasticsearch_document_id', nil) %>
document_id => "<%= p('logstash_parser.elasticsearch_document_id') %>"

<% p('logstash_parser.outputs').each do | output | %>
<%= output['plugin'] %> {
<% if 'elasticsearch' == output['plugin'] %>
<% output['options'] = {
"hosts" => [ p('logstash_parser.elasticsearch.data_hosts').map { |ip| "#{ip}:9200" }.join(',') ],
"flush_size" => p('logstash_parser.elasticsearch.flush_size'),
"index" => p('logstash_parser.elasticsearch.index'),
"document_type" => p('logstash_parser.elasticsearch.index_type'),
"manage_template" => false
}
if p('logstash_parser.elasticsearch.idle_flush_time', nil)
output['options']['idle_flush_time'] = p('logstash_parser.elasticsearch.idle_flush_time')
end
if p('logstash_parser.elasticsearch.document_id', nil)
output['options']['document_id'] = p('logstash_parser.elasticsearch.document_id')
end
if p('logstash_parser.elasticsearch.routing', nil)
output['options']['routing'] = p('logstash_parser.elasticsearch.routing')
end
%>
<% end %>
index => "<%= p('logstash_parser.elasticsearch_index') %>"
document_type => "<%= p('logstash_parser.elasticsearch_index_type') %>"
manage_template => false
<% if p('logstash_parser.elasticsearch_routing', nil) %>
routing => "<%= p('logstash_parser.elasticsearch_routing') %>"
<% output['options'].each do | k, v | %>
<%= k %> => <%= v.inspect %>
<% end %>
}

<%= p('logstash_parser.outputs', '') %>
}
<% end %>
}
10 changes: 5 additions & 5 deletions src/logsearch-config/Rakefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ require 'yaml'
require 'json'

task :clean do
mkdir_p "target"
rm_rf "target/*"
FileUtils.mkdir_p('target')
FileUtils.rm_rf(Dir.glob('target/*'))
end

desc "Builds filters & dashboards"
Expand All @@ -18,7 +18,7 @@ task :build => :clean do
end

desc "Runs all tests"
task test: [
task default: [
:test_filters,
:test_templates
]
Expand Down Expand Up @@ -48,8 +48,8 @@ task :test_templates, [:rspec_files] => :compile_templates do |t, args|
end

task :clean_templates do
mkdir_p 'test/logstash-templates/target'
rm_rf 'test/logstash-templates/target/*'
FileUtils.mkdir_p('test/logstash-templates/target')
FileUtils.rm_rf(Dir.glob('test/logstash-templates/target/*'))
end

desc "Compile bosh templates for tests"
Expand Down
6 changes: 3 additions & 3 deletions src/logsearch-config/bin/test
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ BASE_DIR=$(cd $SCRIPT_DIR/.. ; pwd)
cd $BASE_DIR

if [ $# -eq 0 ]
then rake test
then rake
elif [ $# -eq 1 ]
then rake test $1
then rake $1
elif [ $# -eq 2 ]
then rake test $1["$2"]
then rake $1["$2"]
fi
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,32 @@ template:
testcase:
- name: "default"
config: "input_and_output/test_default-config.yml"
destination: "target/input_and_output-default.conf"
destination: "target/input_and_output-test_default.conf"

- name: "enabled debug"
config: "input_and_output/test_enabled_debug-config.yml"
destination: "target/input_and_output-enabled_debug.conf"
destination: "target/input_and_output-test_enabled_debug.conf"

- name: "inputs"
config: "input_and_output/test_inputs-config.yml"
destination: "target/input_and_output-test_inputs.conf"

- name: "inputs - no redis"
config: "input_and_output/test_inputs_no_redis-config.yml"
destination: "target/input_and_output-test_inputs_no_redis.conf"

- name: "outputs"
config: "input_and_output/test_outputs-config.yml"
destination: "target/input_and_output-test_outputs.conf"

- name: "outputs - no elasticsearch"
config: "input_and_output/test_outputs_no_elasticsearch-config.yml"
destination: "target/input_and_output-test_outputs_no_elasticsearch.conf"

- name: "outputs - elasticsearch properties"
config: "input_and_output/test_outputs_elasticsearch_all_properties-config.yml"
destination: "target/input_and_output-test_outputs_elasticsearch_all_properties.conf"

- name: "outputs - elasticsearch properties"
config: "input_and_output/test_outputs_elasticsearch_required_properties-config.yml"
destination: "target/input_and_output-test_outputs_elasticsearch_required_properties.conf"
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,35 @@ def verify_input_and_output(actual, expected)
describe 'Parser input_and_output.conf' do

context "default" do
verify_input_and_output('input_and_output-default.conf', 'test_default-expected.conf')
verify_input_and_output('input_and_output-test_default.conf', 'test_default-expected.conf')
end

context "enabled debug" do
verify_input_and_output('input_and_output-enabled_debug.conf', 'test_enabled_debug-expected.conf')
verify_input_and_output('input_and_output-test_enabled_debug.conf', 'test_enabled_debug-expected.conf')
end

context "inputs" do
verify_input_and_output('input_and_output-test_inputs.conf', 'test_inputs-expected.conf')
end

context "inputs - no redis" do
verify_input_and_output('input_and_output-test_inputs_no_redis.conf', 'test_inputs_no_redis-expected.conf')
end

context "outputs" do
verify_input_and_output('input_and_output-test_outputs.conf', 'test_outputs-expected.conf')
end

context "outputs - no elasticsearch" do
verify_input_and_output('input_and_output-test_outputs_no_elasticsearch.conf', 'test_outputs_no_elasticsearch-expected.conf')
end

context "outputs - elasticsearch all properties" do
verify_input_and_output('input_and_output-test_outputs_elasticsearch_all_properties.conf', 'test_outputs_elasticsearch_all_properties-expected.conf')
end

context "outputs - elasticsearch required properties" do
verify_input_and_output('input_and_output-test_outputs_elasticsearch_required_properties.conf', 'test_outputs_elasticsearch_required_properties-expected.conf')
end

end
Original file line number Diff line number Diff line change
@@ -1,17 +1,21 @@
---
# --
# Required properties are set + default values (see "parser" job spec for default values).
# --
properties:
logstash_parser:
debug: false
inputs: [ { plugin: "redis", options : {} } ]
elasticsearch_document_id: ~
elasticsearch_index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
elasticsearch_index_type: "%{@type}"
logstash:
output:
elasticsearch:
data_hosts: [127.0.0.1]
flush_size: 500
outputs: [ { plugin: "elasticsearch", options: {} } ]
elasticsearch:
document_id: ~
index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
index_type: "%{@type}"
idle_flush_time: 100
data_hosts: [127.0.0.1]
flush_size: 500
redis:
# required, non-default
host: 127.0.0.1

port: 6379
key: logstash
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
input {
redis {
host => "127.0.0.1"
host => "127.0.0.1"
port => 6379
type => "redis-input"
data_type => "list"
Expand All @@ -16,5 +16,6 @@ output {
index => "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
document_type => "%{@type}"
manage_template => false
idle_flush_time => 100
}
}
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
---
# --
# Default values + enabled logstash debug
# --
properties:
logstash_parser:
debug: true
inputs: [ { plugin: "redis", options : {} } ]
elasticsearch_document_id: ~
elasticsearch_index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
elasticsearch_index_type: "%{@type}"
logstash:
output:
elasticsearch:
data_hosts: [127.0.0.1]
flush_size: 500
outputs: [ { plugin: "elasticsearch", options: {} } ]
elasticsearch:
document_id: ~
index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
index_type: "%{@type}"
data_hosts: [127.0.0.1]
flush_size: 500
redis:
host: 127.0.0.1
port: 6379
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
input {
redis {
host => "127.0.0.1"
host => "127.0.0.1"
port => 6379
type => "redis-input"
data_type => "list"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# --
# Default values + additional inputs.
# --
properties:
logstash_parser:
debug: false
inputs: [ { plugin: "redis", options : {} }, { plugin: "file", options: { path: "my/path/to/file" } }, { plugin: "syslog", options: { host: "127.0.0.1", port: 123} } ]
outputs: [ { plugin: "elasticsearch", options: {} } ]
elasticsearch:
document_id: ~
index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
index_type: "%{@type}"
data_hosts: [127.0.0.1]
flush_size: 500
redis:
host: 127.0.0.1
port: 6379
key: logstash
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
input {
redis {
host => "127.0.0.1"
port => 6379
type => "redis-input"
data_type => "list"
key => "logstash"
threads => 8
}

file {
path => "my/path/to/file"
}

syslog {
host => "127.0.0.1"
port => 123
}
}

output {
elasticsearch {
hosts => ["127.0.0.1:9200"]
flush_size => 500
index => "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
document_type => "%{@type}"
manage_template => false
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# --
# Default values + additional inputs.
# --
properties:
logstash_parser:
debug: false
inputs: [ { plugin: "file", options: { path: "my/path/to/file" } }, { plugin: "syslog", options: { host: "127.0.0.1", port: 123} } ]
outputs: [ { plugin: "elasticsearch", options: {} } ]
elasticsearch:
document_id: ~
index: "logs-%{[@metadata][index]}-%{+YYYY.MM.dd}"
index_type: "%{@type}"
data_hosts: [127.0.0.1]
flush_size: 500
redis:
host: 127.0.0.1
port: 6379
key: logstash
Loading

0 comments on commit 5e1fbea

Please sign in to comment.