diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000..326a058
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,37 @@
+version: '2'
+jobs:
+  build:
+    docker:
+      - image: ubuntu:yakkety
+    working_directory: ~/code
+    steps:
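+      # setup_remote_docker provisions a separate remote Docker engine that the
+      # docker/docker-compose clients installed below will talk to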
+      - setup_remote_docker
+      - run:
+          name: Install Docker client
+          command: |
+            set -x
+            VER="17.03.0-ce"
+            apt-get update -q
+            apt-get install -yq curl python
+            curl -L -o /tmp/docker-$VER.tgz https://get.docker.com/builds/Linux/x86_64/docker-$VER.tgz
+            tar -xz -C /tmp -f /tmp/docker-$VER.tgz
+            mv /tmp/docker/* /usr/bin
+      - run:
+          name: Install docker-compose
+          command: |
+            set -x
+            curl -L https://github.com/docker/compose/releases/download/1.11.2/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+            chmod +x /usr/local/bin/docker-compose
+      - checkout
+      - run:
+          name: Run basic tests
+          working_directory: ~/code/tests
+          command: |
+            bash run_tests.sh | tee /tmp/test.log || true # The test command always exits non-zero
+            ! grep -E 'FAILED|ERROR' /tmp/test.log # Fail the step if the log mentions any failures
+      - run:
+          name: Run integration tests
+          working_directory: ~/code/tests/integration
+          command: |
+            chmod +x set_cluster_name wait_for_es
+            bash ./run.sh
diff --git a/circle.yml b/circle.yml
deleted file mode 100644
index 43f2829..0000000
--- a/circle.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-general:
-  build_dir: tests
-test:
-  override:
-    - bash run_tests.sh | tee /tmp/test.log
-    - grep -v -E 'FAILED|ERROR' /tmp/test.log
diff --git a/elasticsearch_collectd.py b/elasticsearch_collectd.py
index 2f076c7..996ab14 100755
--- a/elasticsearch_collectd.py
+++ b/elasticsearch_collectd.py
@@ -660,6 +660,8 @@ def configure_callback(conf):
                 c.defaults.add(metric_name)
         elif node.key == "IndexStatsMasterOnly":
             c.master_only = str_to_bool(node.values[0])
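+        # "Dimensions" takes a comma-separated list of key=value pairs that is
+        # appended to the plugin_instance of every metric from this cluster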
+        elif node.key == "Dimensions":
+            c.extra_dimensions = node.values[0]
         else:
             log.warning('Unknown config key: %s.' % node.key)
@@ -757,6 +759,8 @@ def __init__(self):
         self.es_current_master = False
         self.node_id = None
+        self.extra_dimensions = ''
+
     def sanatize_intervals(self):
         """Sanitizes the index interval to be greater or equal to and divisible by
         the collection interval
@@ -1065,9 +1069,9 @@ def dispatch_stat(self, result, name, key, dimensions=None):
         # If dimensions are provided, format them and append
         # them to the plugin_instance
-        if dimensions:
-            val.plugin_instance += '[{dims}]'.format(dims=','.join(['='.join(d)
-                                                     for d in dimensions.items()]))
+        dim_str = self.get_dimension_string(dimensions)
+        if dim_str:
+            val.plugin_instance += '[{dims}]'.format(dims=dim_str)
         val.type = estype
         val.type_instance = name
@@ -1076,6 +1080,16 @@ def dispatch_stat(self, result, name, key, dimensions=None):
         log.info('Emitting value: %s' % val)
         val.dispatch()
+    def get_dimension_string(self, dimensions):
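+        """Join per-metric dimensions and any configured extra dimensions
+        into a single 'key=value,key=value' string."""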
+        dim_str = ''
+        if dimensions:
+            dim_str = ','.join(['='.join(d) for d in dimensions.items()])
+
+        if self.extra_dimensions:
+            dim_str += "%s%s" % (',' if dim_str else '', self.extra_dimensions)
+
+        return dim_str
+
 def sanitize_type_instance(index_name):
     """
diff --git a/tests/integration/.env b/tests/integration/.env
new file mode 100644
index 0000000..8fe3bca
--- /dev/null
+++ b/tests/integration/.env
@@ -0,0 +1 @@
+COMPOSE_PROJECT_NAME=collectd-elasticsearch-int
diff --git a/tests/integration/.gitignore b/tests/integration/.gitignore
new file mode 100644
index 0000000..818d16f
--- /dev/null
+++ b/tests/integration/.gitignore
@@ -0,0 +1 @@
+Dockerfile.es.*
diff --git a/tests/integration/20-elasticsearch-test.conf b/tests/integration/20-elasticsearch-test.conf
new file mode 100644
index 0000000..3ad1069
--- /dev/null
+++ b/tests/integration/20-elasticsearch-test.conf
@@ -0,0 +1,28 @@
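+# One <Module> block per ES version under test; the es17 instance also
+# exercises the new Dimensions option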
+<LoadPlugin python>
+  Globals true
+</LoadPlugin>
+
+<Plugin python>
+  ModulePath "/usr/share/collectd/collectd-elasticsearch"
+
+  Import "elasticsearch_collectd"
+
+  <Module elasticsearch_collectd>
+    Interval 3
+    IndexInterval 3
+    Host es17
+    Dimensions "testdim=5"
+  </Module>
+
+  <Module elasticsearch_collectd>
+    Interval 3
+    IndexInterval 3
+    Host es24
+  </Module>
+
+  <Module elasticsearch_collectd>
+    Interval 3
+    IndexInterval 3
+    Host es53
+  </Module>
+</Plugin>
diff --git a/tests/integration/Dockerfile.collectd b/tests/integration/Dockerfile.collectd
new file mode 100644
index 0000000..e520a2e
--- /dev/null
+++ b/tests/integration/Dockerfile.collectd
@@ -0,0 +1,15 @@
+FROM quay.io/signalfuse/collectd:latest
+
+# Disable everything we can except elasticsearch
+ENV COLLECTD_INTERVAL=3 COLLECTD_HOSTNAME=es-test DISABLE_AGGREGATION=true \
+    DISABLE_CPU=true DISABLE_CPUFREQ=true DISABLE_DF=true DISABLE_DISK=true \
+    DISABLE_DOCKER=true DISABLE_HOST_MONITORING=true DISABLE_INTERFACE=true \
+    DISABLE_LOAD=true DISABLE_MEMORY=true DISABLE_PROTOCOLS=true \
+    DISABLE_VMEM=true DISABLE_UPTIME=true
+
+# Debian is super minimalistic
+RUN apt-get update &&\
+    apt-get install -yq netcat
+
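+# wait_for_es blocks until every ES container answers on port 9200, then
+# execs the image's stock /.docker/run.sh entrypoint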
+CMD /.docker/wait_for_es
+ADD tests/integration/wait_for_es /.docker/wait_for_es
+
+# NOTE: the build context for this image must be the root dir of this repo!
+ADD elasticsearch_collectd.py /usr/share/collectd/collectd-elasticsearch/
+ADD tests/integration/20-elasticsearch-test.conf /etc/collectd/managed_config/
diff --git a/tests/integration/Dockerfile.es b/tests/integration/Dockerfile.es
new file mode 100644
index 0000000..e50af29
--- /dev/null
+++ b/tests/integration/Dockerfile.es
@@ -0,0 +1,5 @@
+FROM elasticsearch:ES_VERSION
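+# ES_VERSION is a literal placeholder; the make-es-dockerfiles script
+# substitutes a concrete version to generate each Dockerfile.es.<version>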
+
+CMD /set_cluster_name
+
+ADD set_cluster_name /set_cluster_name
diff --git a/tests/integration/Dockerfile.sink b/tests/integration/Dockerfile.sink
new file mode 100644
index 0000000..9696480
--- /dev/null
+++ b/tests/integration/Dockerfile.sink
@@ -0,0 +1,6 @@
+FROM python:2
+
+EXPOSE 80 8080
+
+ADD sink.py /opt/sink.py
+CMD python -u /opt/sink.py
diff --git a/tests/integration/Dockerfile.test b/tests/integration/Dockerfile.test
new file mode 100644
index 0000000..8823264
--- /dev/null
+++ b/tests/integration/Dockerfile.test
@@ -0,0 +1,4 @@
+FROM python:2
+
+ADD test.py /opt/test.py
+CMD python -u /opt/test.py
diff --git a/tests/integration/docker-compose.yml b/tests/integration/docker-compose.yml
new file mode 100644
index 0000000..5368d21
--- /dev/null
+++ b/tests/integration/docker-compose.yml
@@ -0,0 +1,46 @@
+---
+version: '2'
+services:
+  collectd:
+    build:
+      context: ../..
+      dockerfile: tests/integration/Dockerfile.collectd
+    environment:
+      SF_API_TOKEN: testing
+      SF_INGEST_HOST: fake_sfx
+    depends_on:
+      - fake_sfx
+      - es17
+      - es24
+      - es53
+
+  es17:
+    build:
+      context: .
+      dockerfile: Dockerfile.es.1.7.6
+
+  es24:
+    build:
+      context: .
+      dockerfile: Dockerfile.es.2.4.5
+
+  es53:
+    build:
+      context: .
+      dockerfile: Dockerfile.es.5.3.2
+
+  fake_sfx:
+    build:
+      context: .
+      dockerfile: Dockerfile.sink
+
+  test:
+    build:
+      context: .
+      dockerfile: Dockerfile.test
+    depends_on:
+      - collectd
+
+
+networks:
+  default: {}
diff --git a/tests/integration/make-es-dockerfiles b/tests/integration/make-es-dockerfiles
new file mode 100755
index 0000000..7d34658
--- /dev/null
+++ b/tests/integration/make-es-dockerfiles
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+for version in 1.7.6 2.4.5 5.3.2
+do
+    sed -e "s/ES_VERSION/$version/" Dockerfile.es > Dockerfile.es.$version
+done
diff --git a/tests/integration/run.sh b/tests/integration/run.sh
new file mode 100755
index 0000000..2d73e13
--- /dev/null
+++ b/tests/integration/run.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+cd "$DIR"
+
+./make-es-dockerfiles
+
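+# -T disables pseudo-tty allocation so this works under CI; the test
+# container's exit status determines whether the suite passed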
+docker-compose run --rm -T test
+status=$?
+
+docker-compose stop -t0
+
+exit $status
diff --git a/tests/integration/set_cluster_name b/tests/integration/set_cluster_name
new file mode 100755
index 0000000..2dcaaac
--- /dev/null
+++ b/tests/integration/set_cluster_name
@@ -0,0 +1,13 @@
+#!/bin/bash
+
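+# Cap the heap and give each container a version-specific cluster.name so
+# the integration test can tell the clusters apart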
+cat <<EOH >> /etc/default/elasticsearch
+ES_HEAP_SIZE=128m
+MAX_LOCKED_MEMORY=100000
+EOH
+
+cat <<EOH > /usr/share/elasticsearch/config/elasticsearch.yml
+cluster.name: es-${ELASTICSEARCH_VERSION}
+http.host: 0.0.0.0
+EOH
+
+exec /docker-entrypoint.sh elasticsearch
diff --git a/tests/integration/sink.py b/tests/integration/sink.py
new file mode 100644
index 0000000..8a28047
--- /dev/null
+++ b/tests/integration/sink.py
@@ -0,0 +1,61 @@
+from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+import json
+import signal
+import threading
+from time import time
+
+# This module collects metrics from collectd and can echo them back out for
+# making assertions on the collected metrics.
+
+
+# Fake the /v1/collectd endpoint and just stick all of the metrics in a
+# list
+def run_fake_ingest(metric_data):
+    class FakeCollectdIngest(BaseHTTPRequestHandler):
+        def do_POST(self):
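+            # Each POST body is a JSON array of datapoints; accumulate them
+            # all in the shared list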
+            body = self.rfile.read(int(self.headers.getheader('Content-Length')))
+
+            metric_data.extend(json.loads(body))
+
+            self.send_response(200)
+            self.send_header("Content-Type", "text/ascii")
+            self.send_header("Content-Length", "2")
+            self.end_headers()
+            self.wfile.write("OK")
+
+    print 'Starting ingest server on port 80'
+    httpd = HTTPServer(('', 80), FakeCollectdIngest)
+    httpd.serve_forever()
+    print 'Ingest server shutting down'
+
+
+# Dumps all of the collected metrics back out as JSON upon request
+def serve_metric_data(metric_data):
+    class MetricDataSpewer(BaseHTTPRequestHandler):
+        def do_GET(self):
+            data = json.dumps(metric_data)
+            self.send_response(200)
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(data)))
+            self.end_headers()
+            print data
+            self.wfile.write(data)  # wfile, not rfile: rfile is the request's input stream
+
+    print 'Starting metric spewer on port 8080'
+    httpd = HTTPServer(('', 8080), MetricDataSpewer)
+    httpd.serve_forever()
+    print 'Metric spewer shutting down'
+
+
+if __name__ == "__main__":
+    # Lists are thread-safe due to the GIL
+    metric_data = []
+    t1 = threading.Thread(target=run_fake_ingest, args=(metric_data,))
+    t2 = threading.Thread(target=serve_metric_data, args=(metric_data,))
+
+    t1.start()
+    t2.start()
+
+    t1.join()
+    t2.join()
+
diff --git a/tests/integration/test.py b/tests/integration/test.py
new file mode 100644
index 0000000..cf32559
--- /dev/null
+++ b/tests/integration/test.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+import httplib
+import json
+from time import time, sleep
+
+# Quick and dirty integration test for multi-cluster support in one collectd
+# instance. This test script is intended to be run with docker-compose with the
+# provided docker-compose.yml configuration.
+
+# This is not very flexible but could be expanded to support other types of
+# integration tests if so desired.
+
+VERSIONS_TESTED = [
+    '1.7.6',
+    '2.4.5',
+    '5.3.2',
+]
+TIMEOUT_SECS = 60
+
+
+def get_metric_data():
+    # Use httplib instead of requests so we don't have to install stuff with pip
+    conn = httplib.HTTPConnection("fake_sfx", 8080)
+    conn.request("GET", "/")
+    resp = conn.getresponse()
+    data = resp.read()  # read before closing; close() discards the response body
+    conn.close()
+    return json.loads(data)
+
+
+def wait_for_metrics_from_each_cluster():
+    start = time()
+    for cluster in ['es-' + v for v in VERSIONS_TESTED]:
+        print 'Waiting for metrics from cluster %s...' % (cluster,)
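+        # The plugin reports the cluster name as part of plugin_instance,
+        # so look for it there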
+        eventually_true(lambda: any([cluster in m.get('plugin_instance', '')
+                                     for m in get_metric_data()]),
+                        TIMEOUT_SECS - (time() - start))
+        print 'Found!'
+
+
+def eventually_true(f, timeout_secs):
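+    """Evaluate f every 0.5s until it returns truthy, re-raising the
+    AssertionError once timeout_secs have elapsed."""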
+    start = time()
+    while True:
+        try:
+            assert f()
+        except AssertionError:
+            if time() - start > timeout_secs:
+                raise
+            sleep(0.5)
+        else:
+            break
+
+
+if __name__ == "__main__":
+    wait_for_metrics_from_each_cluster()
diff --git a/tests/integration/wait_for_es b/tests/integration/wait_for_es
new file mode 100755
index 0000000..94652b6
--- /dev/null
+++ b/tests/integration/wait_for_es
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+wait_for () {
+    host=$1
+    while ! nc -z $host 9200
+    do
+        sleep 0.2
+    done
+}
+
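+# Block until every ES cluster accepts TCP connections on 9200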
+for host in es17 es24 es53
+do
+    wait_for $host
+done
+
+exec /.docker/run.sh