Skip to content

Commit

Permalink
Allow disabling default elasticsearch output for parser job
Browse files Browse the repository at this point in the history
Fix for cloudfoundry-community#8.
Property 'logstash_parser.outputs' is set with a list of Logstash output plugins; by default it is set to the 'elasticsearch' plugin.
Rename 'logstash_parser' elasticsearch properties to follow the same naming convention - 'logstash_parser.elasticsearch.*'.
Add logstash-templates tests to test input_and_output.conf.erb template with different configurations.
Minor rework in test scripts - all tests are run with the default rake task,
remove target folder from git.
Use FileUtils.* methods in Rakefile 'cleanup' tasks.
  • Loading branch information
hannayurkevich committed Nov 28, 2016
1 parent e77c9d5 commit 0b3fcf0
Show file tree
Hide file tree
Showing 28 changed files with 401 additions and 370 deletions.
36 changes: 22 additions & 14 deletions jobs/parser/spec
Original file line number Diff line number Diff line change
Expand Up @@ -44,23 +44,38 @@ properties:
password: c1oudbunny
default: [ { plugin: "redis", options : {} } ]
logstash_parser.outputs:
description: "The configuration to embed into the logstash outputs section"
description: |
A list of output plugins, with a hash of options for each of them. Please refer to example below.
example:
outputs:
- plugin: mongodb
options:
uri: 192.168.1.1
database: logsearch
collection: logs
default: [ { plugin: "elasticsearch", options: {} } ]
logstash_parser.workers:
description: "The number of worker threads that logstash should use (default: auto = one per CPU)"
default: auto
logstash_parser.idle_flush_time:
description: "How frequently to flush events if the output queue is not full."
logstash_parser.elasticsearch_document_id:
logstash_parser.elasticsearch.idle_flush_time:
description: "How frequently to flush events if the output queue is not full."
logstash_parser.elasticsearch.document_id:
description: "Use a specific, dynamic ID rather than an auto-generated identifier."
default: ~
logstash_parser.elasticsearch_index:
logstash_parser.elasticsearch.index:
description: "The specific, dynamic index name to write events to."
default: "logstash-%{+YYYY.MM.dd}"
logstash_parser.elasticsearch_index_type:
logstash_parser.elasticsearch.index_type:
description: "The specific, dynamic index type name to write events to."
default: "%{@type}"
logstash_parser.elasticsearch_routing:
logstash_parser.elasticsearch.routing:
description: "The routing to be used when indexing a document."
logstash_parser.elasticsearch.data_hosts:
description: The list of elasticsearch data node IPs
default: [127.0.0.1]
logstash_parser.elasticsearch.flush_size:
description: Controls how many logs will be buffered and sent to Elasticsearch for bulk indexing
default: 500
logstash_parser.timecop.reject_greater_than_hours:
description: "Logs with timestamps greater than this many hours in the future won't be parsed and will get tagged with fail/timecop"
default: 1
Expand All @@ -74,13 +89,6 @@ properties:
description: "A list of index templates that need to be present in ElasticSearch before the process starts"
default: ["index_template"]

logstash.output.elasticsearch.data_hosts:
description: The list of elasticsearch data node IPs
default: [127.0.0.1]
logstash.output.elasticsearch.flush_size:
description: Controls how many logs will be buffered and sent to Elasticsearch for bulk indexing
default: 500

redis.host:
description: Redis host of queue
redis.port:
Expand Down
2 changes: 1 addition & 1 deletion jobs/parser/templates/bin/parser_ctl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ source /var/vcap/jobs/parser/helpers/ctl_setup.sh 'parser'

function wait_for_template {
local template_name="$1"
local MASTER_URL="<%= p("logstash.output.elasticsearch.data_hosts").first %>:9200"
local MASTER_URL="<%= p("logstash_parser.elasticsearch.data_hosts").first %>:9200"

set +e
while true; do
Expand Down
41 changes: 25 additions & 16 deletions jobs/parser/templates/config/input_and_output.conf.erb
Original file line number Diff line number Diff line change
Expand Up @@ -27,22 +27,31 @@ output {
}
<% end %>

elasticsearch {
hosts => [<%= p("logstash.output.elasticsearch.data_hosts").map { |ip| "\"#{ip}:9200\"" }.join(',') %>]
flush_size => <%= p("logstash.output.elasticsearch.flush_size") %>
<% if p('logstash_parser.idle_flush_time', nil) %>
idle_flush_time => <%= p('logstash_parser.idle_flush_time') %>
<% end %>
<% if p('logstash_parser.elasticsearch_document_id', nil) %>
document_id => "<%= p('logstash_parser.elasticsearch_document_id') %>"

<% p('logstash_parser.outputs').each do | output | %>
<%= output['plugin'] %> {
<% if 'elasticsearch' == output['plugin'] %>
<% output['options'] = {
"hosts" => [ p('logstash_parser.elasticsearch.data_hosts').map { |ip| "#{ip}:9200" }.join(',') ],
"flush_size" => p('logstash_parser.elasticsearch.flush_size'),
"index" => p('logstash_parser.elasticsearch.index'),
"document_type" => p('logstash_parser.elasticsearch.index_type'),
"manage_template" => false
}
if p('logstash_parser.elasticsearch.idle_flush_time', nil)
output['options']['idle_flush_time'] = p('logstash_parser.elasticsearch.idle_flush_time')
end
if p('logstash_parser.elasticsearch.document_id', nil)
output['options']['document_id'] = p('logstash_parser.elasticsearch.document_id')
end
if p('logstash_parser.elasticsearch.routing', nil)
output['options']['routing'] = p('logstash_parser.elasticsearch.routing')
end
%>
<% end %>
index => "<%= p('logstash_parser.elasticsearch_index') %>"
document_type => "<%= p('logstash_parser.elasticsearch_index_type') %>"
manage_template => false
<% if p('logstash_parser.elasticsearch_routing', nil) %>
routing => "<%= p('logstash_parser.elasticsearch_routing') %>"
<% output['options'].each do | k, v | %>
<%= k %> => <%= v.inspect %>
<% end %>
}

<%= p('logstash_parser.outputs', '') %>
}
<% end %>
}
12 changes: 7 additions & 5 deletions src/logsearch-config/Rakefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,24 @@ require 'yaml'
require 'json'

task :clean do
mkdir_p "target"
rm_rf "target/*"
FileUtils.mkdir_p('target')
FileUtils.rm_rf(Dir.glob('target/*'))
end

desc "Builds filters & dashboards"
task :build => :clean do
puts "===> Building ..."
compile_erb 'src/logstash-filters/default.conf.erb', 'target/logstash-filters-default.conf'
compile_erb 'src/logstash-filters/cluster_monitor.conf.erb', 'target/logstash-filters-monitor.conf'
cp 'src/logstash-filters/snippets/metric.conf', 'target/logstash-filters-metric.conf'
cp 'src/logstash-filters/deployment_lookup.yml', 'target/deployment_lookup.yml'

puts "===> Artifacts:"
puts `find target`
end

desc "Runs all tests"
task test: [
task default: [
:test_filters,
:test_templates
]
Expand Down Expand Up @@ -48,8 +50,8 @@ task :test_templates, [:rspec_files] => :compile_templates do |t, args|
end

task :clean_templates do
mkdir_p 'test/logstash-templates/target'
rm_rf 'test/logstash-templates/target/*'
FileUtils.mkdir_p('test/logstash-templates/target')
FileUtils.rm_rf(Dir.glob('test/logstash-templates/target/*'))
end

desc "Compile bosh templates for tests"
Expand Down
6 changes: 3 additions & 3 deletions src/logsearch-config/bin/test
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ BASE_DIR=$(cd $SCRIPT_DIR/.. ; pwd)
cd $BASE_DIR

if [ $# -eq 0 ]
then rake test
then rake
elif [ $# -eq 1 ]
then rake test $1
then rake $1
elif [ $# -eq 2 ]
then rake test $1["$2"]
then rake $1["$2"]
fi
Empty file.
10 changes: 0 additions & 10 deletions src/logsearch-config/target/deployment_lookup.yml

This file was deleted.

205 changes: 0 additions & 205 deletions src/logsearch-config/target/logstash-filters-default.conf

This file was deleted.

Loading

0 comments on commit 0b3fcf0

Please sign in to comment.