diff --git a/.env b/.env
index 39be639d4..3dee14551 100644
--- a/.env
+++ b/.env
@@ -27,6 +27,22 @@ NEXTAUTH_SECRET=secret
 RETRACED_HOST_URL=http://retraced-api:3000/auditlog
 RETRACED_EXTERNAL_URL=http://localhost:3000/auditlog
+# Export Logs
+EXPORT_DATADOG_API_KEY=
+EXPORT_WEBHOOK_URL=http://vector:9000
+EXPORT_WEBHOOK_USERNAME=admin
+EXPORT_WEBHOOK_PASSWORD=admin
+EXPORT_DDSOURCE=local-dev-machine
+EXPORT_DDTAGS="Audit-Logs, Retraced, BoxyHQ"
+EXPORT_DATADOG_HOSTNAME="127.0.0.1"
+EXPORT_SERVICE="Retraced-audit-logs"
+EXPORT_DATADOG_REGION=us
+EXPORT_DATADOG_SITE=datadoghq.com
+# S3 Bucket
+EXPORT_S3_BUCKET=
+EXPORT_S3_REGION=
+EXPORT_S3_ACCESS_KEY_ID=
+EXPORT_S3_SECRET_ACCESS_KEY=
 
 # OpenTelemetry
 # https://opentelemetry.io/docs/concepts/sdk-configuration/otlp-exporter-configuration/
 # If you have any issues with using the otel exporter and want to enable debug logs
@@ -40,4 +56,4 @@ OTEL_EXPORTER_OTLP_METRICS_HEADERS=
 GEOIPUPDATE_LICENSE_KEY=
 GEOIPUPDATE_ACCOUNT_ID=
 GEOIPUPDATE_USE_MMDB=
-GEOIPUPDATE_DB_DIR=/etc/mmdb
\ No newline at end of file
+GEOIPUPDATE_DB_DIR=/etc/mmdb
diff --git a/.gitignore b/.gitignore
index a7e93f7ec..186177845 100644
--- a/.gitignore
+++ b/.gitignore
@@ -106,5 +106,7 @@ test-results.xml
 .env.development.local
 .env.test.local
 .env.production.local
+vector/*
+minio/*
 mmdb/**/**
-GeoIP.conf
\ No newline at end of file
+GeoIP.conf
diff --git a/docker-compose.yaml b/docker-compose.yaml
index f5a2c4a11..80c782f5e 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -131,16 +131,16 @@ services:
   #   networks:
   #     - retraced
 
-  # kibana:
-  #   image: docker.elastic.co/kibana/kibana:7.8.0
-  #   environment:
-  #     - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
-  #   networks:
-  #     - retraced
-  #   depends_on:
-  #     - elasticsearch
-  #   ports:
-  #     - 5601:5601
+  kibana:
+    image: docker.elastic.co/kibana/kibana:7.8.0
+    environment:
+      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
+    networks:
+      - retraced
+    depends_on:
+      - elasticsearch
+    ports:
+      - 5601:5601
 
   retraced-dev-bootstrap:
     build:
@@ -221,5 +221,41 @@ services:
     depends_on:
       - "retraced-api"
     restart: "always"
+
+  vector:
+    image: timberio/vector:0.X-alpine
+    environment:
+      - EXPORT_WEBHOOK_USERNAME=${EXPORT_WEBHOOK_USERNAME}
+      - EXPORT_WEBHOOK_PASSWORD=${EXPORT_WEBHOOK_PASSWORD}
+      - EXPORT_DDSOURCE=${EXPORT_DDSOURCE}
+      - EXPORT_DDTAGS=${EXPORT_DDTAGS}
+      - EXPORT_DATADOG_HOSTNAME=${EXPORT_DATADOG_HOSTNAME}
+      - EXPORT_SERVICE=${EXPORT_SERVICE}
+      - EXPORT_DATADOG_API_KEY=${EXPORT_DATADOG_API_KEY}
+      - EXPORT_DATADOG_REGION=${EXPORT_DATADOG_REGION}
+      - EXPORT_DATADOG_SITE=${EXPORT_DATADOG_SITE}
+      - EXPORT_S3_BUCKET=${EXPORT_S3_BUCKET}
+      - EXPORT_S3_REGION=${EXPORT_S3_REGION}
+      - EXPORT_S3_ACCESS_KEY_ID=${EXPORT_S3_ACCESS_KEY_ID}
+      - EXPORT_S3_SECRET_ACCESS_KEY=${EXPORT_S3_SECRET_ACCESS_KEY}
+    volumes:
+      - ./vector.toml:/etc/vector/vector.toml
+      - ./vector/data:/var/lib/vector/
+    networks:
+      - retraced
+    depends_on:
+      - "minio"
+      - "elasticsearch"
+
+  minio:
+    image: quay.io/minio/minio:latest
+    ports:
+      - "9002:9000"
+      - "9001:9001"
+    volumes:
+      - ./minio/data:/data
+    networks:
+      - retraced
+    command: server /data --console-address ":9001"
 
 volumes:
   mmdb:
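With the stack above running, the Vector `http_server` source can be smoke-tested before involving the processor at all. A minimal sketch, assuming the `admin`/`admin` defaults from `.env` and execution from inside the compose network (Vector's port 9000 is not published to the host):

```ts
// smoke-test.ts: hypothetical harness, not part of this change.
// Posts a hand-rolled audit event to the Vector webhook source.
import axios from "axios";

async function main() {
  const res = await axios.post(
    "http://vector:9000", // EXPORT_WEBHOOK_URL from .env; only resolvable inside the compose network
    {
      action: "user.login",
      crud: "c",
      description: "User logged in",
      source_ip: "127.0.0.1",
      actor: { id: "user-1", name: "Jane" },
      received: Date.now(),
    },
    { auth: { username: "admin", password: "admin" } } // EXPORT_WEBHOOK_USERNAME / _PASSWORD defaults
  );
  console.log(`Vector answered ${res.status}`); // a 2xx status means the source accepted the event
}

main().catch((err) => console.error(err.message));
```
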
diff --git a/src/_processor/workers/saveEventToElasticsearch.ts b/src/_processor/workers/saveEventToElasticsearch.ts
index aa512bddf..d94eb47d9 100644
--- a/src/_processor/workers/saveEventToElasticsearch.ts
+++ b/src/_processor/workers/saveEventToElasticsearch.ts
@@ -2,6 +2,7 @@ import _ from "lodash";
 import moment from "moment";
 import { Clock } from "../common";
 import { ClientWithRetry, getESWithRetry } from "../../persistence/elasticsearch";
+import sendToWebhook from "../../ee/export/index";
 import { instrumented, recordOtelHistogram } from "../../metrics/opentelemetry/instrumentation";
 
 export class ElasticsearchSaver {
@@ -25,6 +26,7 @@ export class ElasticsearchSaver {
     const alias = `retraced.${jobObj.projectId}.${jobObj.environmentId}.current`;
     try {
       await this.esIndex(event, alias);
+      sendToWebhook(event);
     } catch (e) {
       e.retry = true;
       throw e;
diff --git a/src/config.ts b/src/config.ts
index 49ecd6e87..f309ae198 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -72,6 +72,9 @@ export default {
     env.RETRACED_NO_ANALYTICS ||
     process.env.DO_NOT_TRACK ||
     env.DO_NOT_TRACK,
+  EXPORT_WEBHOOK_URL: process.env.EXPORT_WEBHOOK_URL || env.EXPORT_WEBHOOK_URL,
+  EXPORT_WEBHOOK_USERNAME: process.env.EXPORT_WEBHOOK_USERNAME || env.EXPORT_WEBHOOK_USERNAME,
+  EXPORT_WEBHOOK_PASSWORD: process.env.EXPORT_WEBHOOK_PASSWORD || env.EXPORT_WEBHOOK_PASSWORD,
   GEOIPUPDATE_USE_MMDB: process.env.GEOIPUPDATE_USE_MMDB || env.GEOIPUPDATE_USE_MMDB,
   GEOIPUPDATE_DB_DIR: process.env.GEOIPUPDATE_DB_DIR || env.GEOIPUPDATE_DB_DIR || "/etc/mmdb",
 };
diff --git a/src/ee/ENTERPRISE.md b/src/ee/ENTERPRISE.md
new file mode 100644
index 000000000..86d37b62c
--- /dev/null
+++ b/src/ee/ENTERPRISE.md
@@ -0,0 +1,7 @@
+# Enterprise Edition
+
+Welcome to the Enterprise Edition ("/ee") of BoxyHQ.
+
+The [/ee](https://github.com/retracedhq/retraced/tree/main/ee) subfolder is the place for all the **Enterprise** features for this repository.
+
+> _❗ NOTE: This section is copyrighted (unlike the rest of our [repository](https://github.com/retracedhq/retraced)). You are not allowed to use this code without obtaining a proper [license](https://boxyhq.com/pricing) first. ❗_
diff --git a/src/ee/LICENSE b/src/ee/LICENSE
new file mode 100644
index 000000000..e315ec834
--- /dev/null
+++ b/src/ee/LICENSE
@@ -0,0 +1 @@
+The BoxyHQ Enterprise Edition (EE) license (the “EE License”)
diff --git a/src/ee/export/Readme.md b/src/ee/export/Readme.md
new file mode 100644
index 000000000..673b763ee
--- /dev/null
+++ b/src/ee/export/Readme.md
@@ -0,0 +1,3 @@
+# Export Audit Logs in real time using Vector
+
+This feature uses Vector to export audit logs to Datadog, S3, and Elasticsearch as they are indexed.
diff --git a/src/ee/export/index.ts b/src/ee/export/index.ts
new file mode 100644
index 000000000..fb4a49cff
--- /dev/null
+++ b/src/ee/export/index.ts
@@ -0,0 +1,26 @@
+import axios from "axios";
+import config from "../../config";
+import { logger } from "../../logger";
+
+export default function sendToWebhook(event: any): void {
+  if (config.EXPORT_WEBHOOK_URL) {
+    delete event.raw;
+    // Fire-and-forget: failures are logged but never propagated to the caller.
+    axios
+      .post(config.EXPORT_WEBHOOK_URL, event, {
+        auth:
+          config.EXPORT_WEBHOOK_USERNAME && config.EXPORT_WEBHOOK_PASSWORD
+            ? {
+                username: config.EXPORT_WEBHOOK_USERNAME,
+                password: config.EXPORT_WEBHOOK_PASSWORD,
+              }
+            : undefined,
+      })
+      .then(() => {
+        logger.info(`[VECTOR EXPORT] Sent to webhook`);
+      })
+      .catch(() => {
+        logger.info(`[VECTOR EXPORT] Failed to send to webhook`);
+      });
+  }
+}
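Because `sendToWebhook` logs success only after the POST resolves and absorbs any rejection in its trailing `.catch()`, a Vector outage never bubbles into the Elasticsearch retry path. A hypothetical jest-style sketch of that contract (paths and setup are assumptions, and it presumes `EXPORT_WEBHOOK_URL` is set in the test environment so the POST actually fires):

```ts
// export.test.ts: illustrative only, not part of this change.
import axios from "axios";
import sendToWebhook from "../src/ee/export/index";

jest.mock("axios");

test("webhook failures never propagate to the caller", () => {
  (axios.post as jest.Mock).mockRejectedValue(new Error("vector is down"));
  // The internal .catch() absorbs the rejection, so the ES save path
  // (which calls this fire-and-forget) is unaffected.
  expect(() => sendToWebhook({ action: "user.login", raw: "dropped" })).not.toThrow();
});
```
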
diff --git a/vector.toml b/vector.toml
new file mode 100644
index 000000000..4dc878d89
--- /dev/null
+++ b/vector.toml
@@ -0,0 +1,128 @@
+data_dir = "/var/lib/vector/"
+[api]
+  enabled = true
+# The data source that Vector will collect logs from
+[sources.webhook]
+type = "http_server"      # The protocol to use
+address = "0.0.0.0:9000"  # The address to bind to
+healthcheck = true        # Enable built-in health checks
+body_size_limit = "1mb"   # Maximum size of request body
+auth.password = "${EXPORT_WEBHOOK_PASSWORD}"
+auth.username = "${EXPORT_WEBHOOK_USERNAME}"
+
+# The transformation(s) to apply to each event for Datadog
+[transforms.add_datadog_info]
+type = "remap"
+inputs = [ "webhook" ]
+source = """
+# Set the values of the output object
+.ddsource = "${EXPORT_DDSOURCE}"
+.ddtags = "${EXPORT_DDTAGS}"
+.hostname = "${EXPORT_DATADOG_HOSTNAME}"
+.service = "${EXPORT_SERVICE}"
+"""
+
+# The transformation(s) to apply to each event for S3
+[transforms.s3_transform]
+type = "remap"
+inputs = [ "webhook" ]
+source = """
+# Set the values of the output object
+. = parse_json!(.message)
+"""
+
+[transforms.ecs_transform]
+type = "remap"
+inputs = [ "webhook" ]
+source = """
+# Map the Retraced event onto ECS fields
+. = parse_json!(.message)
+.host.name = "localhost"
+.host.ip = .source_ip
+.event.action = .action
+.event.code = .crud
+.event.module = .component
+if .received == null {
+  .event.received = now()
+} else {
+  .event.received = format_timestamp!(.received, format: "%+")
+}
+.event.dataset = "Audit Log"
+.user.id = .actor.id
+.user.name = .actor.name
+.user.domain = .actor.href
+.service.id = .target.id
+.service.name = .target.name
+.service.address = .target.href
+.service.type = .target.type
+.group.id = .group.id
+.group.name = .group.name
+.group.domain = .group.href
+.source.ip = .source_ip
+.message = .description
+if .is_failure == true {
+  .event.outcome = "failure"
+} else {
+  .event.outcome = "success"
+}
+if .created == null {
+  .@timestamp = now()
+} else {
+  .@timestamp = format_timestamp!(.created, format: "%+")
+}
+del(.actor)
+del(.target)
+del(.received)
+del(.action)
+del(.crud)
+del(.is_failure)
+del(.component)
+del(.group.href)
+del(.source_ip)
+del(.description)
+del(.created)
+del(.canonical_time)
+"""
+
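To make `ecs_transform` concrete, here is roughly how one Retraced event (the JSON parsed out of `.message`) is reshaped into an ECS-style document. A sketch with illustrative values only:

```ts
// Input: the audit event as posted by sendToWebhook (parsed from .message).
const before = {
  action: "user.login",
  crud: "c",
  component: "auth-service",
  description: "User logged in",
  is_failure: false,
  source_ip: "203.0.113.7",
  actor: { id: "user-1", name: "Jane", href: "https://example.com/jane" },
  target: { id: "svc-1", name: "billing", href: "https://example.com/billing", type: "service" },
  group: { id: "group-1", name: "Acme", href: "https://example.com/acme" },
  received: 1700000000000,
  created: 1700000000000,
  canonical_time: 1700000000000,
};

// Output: what the remap program above emits toward the elasticsearch sink
// (assuming the timestamp fields survive format_timestamp!).
const after = {
  "@timestamp": "2023-11-14T22:13:20+00:00",
  message: "User logged in",
  host: { name: "localhost", ip: "203.0.113.7" },
  event: {
    action: "user.login",
    code: "c",
    module: "auth-service",
    dataset: "Audit Log",
    outcome: "success",
    received: "2023-11-14T22:13:20+00:00",
  },
  user: { id: "user-1", name: "Jane", domain: "https://example.com/jane" },
  service: { id: "svc-1", name: "billing", address: "https://example.com/billing", type: "service" },
  group: { id: "group-1", name: "Acme", domain: "https://example.com/acme" },
  source: { ip: "203.0.113.7" },
};
```

One caveat worth flagging: `format_timestamp!` expects a VRL timestamp, so if `received`/`created` arrive as epoch milliseconds they may need a `from_unix_timestamp` conversion first.
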
+# The destination(s) to send the events to
+[sinks.datadog_sink]
+type = "datadog_logs"
+inputs = [ "add_datadog_info" ]
+default_api_key = "${EXPORT_DATADOG_API_KEY}"
+compression = "gzip"
+region = "${EXPORT_DATADOG_REGION}"
+site = "${EXPORT_DATADOG_SITE}"
+acknowledgements.enabled = true
+healthcheck.enabled = true
+request.concurrency = 10
+request.rate_limit_duration_secs = 1
+request.rate_limit_num = 10
+buffer.type = "disk"
+# 1GB
+buffer.max_size = 1073741952
+
+[sinks.s3_sink]
+type = "aws_s3"
+inputs = [ "s3_transform" ]
+bucket = "${EXPORT_S3_BUCKET}"
+region = "${EXPORT_S3_REGION}"
+endpoint = "http://minio:9000"
+encoding.codec = "json"
+acknowledgements.enabled = true
+auth.access_key_id = "${EXPORT_S3_ACCESS_KEY_ID}"
+auth.secret_access_key = "${EXPORT_S3_SECRET_ACCESS_KEY}"
+batch.max_events = 1000
+
+[sinks.ecs_sink]
+type = "elasticsearch"
+inputs = [ "ecs_transform" ]
+acknowledgements.enabled = true
+api_version = "v7"
+auth.strategy = "basic"
+auth.user = "elastic"
+auth.password = "changeme"
+buffer.type = "memory"
+buffer.max_events = 10
+bulk.action = "index"
+bulk.index = "vector-%Y-%m-%d"
+endpoints = ["http://elasticsearch:9200"]
\ No newline at end of file
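Once events flow, the Elasticsearch sink is easy to verify from the host. A sketch assuming the elasticsearch service publishes port 9200 to the host and the same `elastic`/`changeme` credentials the sink uses; the S3 output can likewise be inspected in the MinIO console at `localhost:9001`:

```ts
// verify-ecs-sink.ts: hypothetical check that the daily vector-* indices are filling up.
import axios from "axios";

async function main() {
  const res = await axios.get("http://localhost:9200/vector-*/_count", {
    auth: { username: "elastic", password: "changeme" }, // mirrors ecs_sink auth
  });
  console.log(`ECS-mapped documents indexed so far: ${res.data.count}`);
}

main().catch((err) => console.error(err.message));
```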