diff --git a/README.md b/README.md
index b7e8ad1..e423577 100644
--- a/README.md
+++ b/README.md
@@ -107,4 +107,5 @@ Add the environment variable `JAVA_TOOL_OPTIONS` to your Lambda functions and se
 - Aws SDK V1
 - Aws SDK V2
-- Apache HTTP Client
\ No newline at end of file
+- Apache HTTP Client
+- Apache Kafka
\ No newline at end of file
diff --git a/findbugs/findbugs-exclude.xml b/findbugs/findbugs-exclude.xml
index 4536193..3a62d71 100644
--- a/findbugs/findbugs-exclude.xml
+++ b/findbugs/findbugs-exclude.xml
@@ -2,4 +2,13 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 8e4ee0c..d3c29bd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,7 +68,7 @@
         <dependency>
             <groupId>com.amazonaws</groupId>
             <artifactId>aws-lambda-java-events</artifactId>
-            <version>2.2.6</version>
+            <version>3.11.5</version>
         </dependency>
         <dependency>
             <groupId>com.amazonaws</groupId>
@@ -133,6 +133,13 @@
             <version>2.25.45</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>3.1.0</version>
+        </dependency>
+
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
diff --git a/src/main/java/io/lumigo/core/SpansContainer.java b/src/main/java/io/lumigo/core/SpansContainer.java
index 50a12ef..07813ef 100644
--- a/src/main/java/io/lumigo/core/SpansContainer.java
+++ b/src/main/java/io/lumigo/core/SpansContainer.java
@@ -13,17 +13,25 @@
 import io.lumigo.core.utils.JsonUtils;
 import io.lumigo.core.utils.SecretScrubber;
 import io.lumigo.core.utils.StringUtils;
+import io.lumigo.models.*;
 import io.lumigo.models.HttpSpan;
-import io.lumigo.models.Reportable;
 import io.lumigo.models.Span;
 import java.io.*;
 import java.util.*;
 import java.util.concurrent.Callable;
+import lombok.Getter;
 import org.apache.http.Header;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
 import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.clients.producer.internals.ProducerMetadata;
+import org.apache.kafka.common.serialization.Serializer;
 import org.pmw.tinylog.Logger;
 import software.amazon.awssdk.awscore.AwsResponse;
 import software.amazon.awssdk.core.SdkResponse;
@@ -41,14 +49,16 @@ public class SpansContainer {
     private static final String AMZN_TRACE_ID = "_X_AMZN_TRACE_ID";
     private static final String FUNCTION_SPAN_TYPE = "function";
     private static final String HTTP_SPAN_TYPE = "http";
-    private static final SecretScrubber secretScrubber = new SecretScrubber(new EnvUtil().getEnv());
+    public static final String KAFKA_SPAN_TYPE = "kafka";

     private Span baseSpan;
-    private Span startFunctionSpan;
+    @Getter private Span startFunctionSpan;
     private Long rttDuration;
     private Span endFunctionSpan;
     private Reporter reporter;
-    private List<HttpSpan> httpSpans = new LinkedList<>();
+    private SecretScrubber secretScrubber = new SecretScrubber(new EnvUtil().getEnv());
+    @Getter private List<BaseSpan> spans = new LinkedList<>();
+
     private static final SpansContainer ourInstance = new SpansContainer();

     public static SpansContainer getInstance() {
@@ -63,7 +73,7 @@ public void clear() {
         rttDuration = null;
         endFunctionSpan = null;
         reporter = null;
-        httpSpans = new LinkedList<>();
+        spans = new LinkedList<>();
     }

     private SpansContainer() {}
@@ -71,6 +81,7 @@ private SpansContainer() {}
     public void init(Map<String, String> env, Reporter reporter, Context context, Object event) {
         this.clear();
         this.reporter = reporter;
+        this.secretScrubber = new SecretScrubber(new EnvUtil().getEnv());

         int javaVersion = AwsUtils.parseJavaVersion(System.getProperty("java.version"));
         if (javaVersion > 11) {
@@ -81,6 +92,7 @@ public void init(Map<String, String> env, Reporter reporter, Context context, Ob
         Logger.debug("awsTracerId {}", awsTracerId);

         AwsUtils.TriggeredBy triggeredBy = AwsUtils.extractTriggeredByFromEvent(event);
+
         long startTime = System.currentTimeMillis();
         this.baseSpan =
                 Span.builder()
@@ -166,8 +178,7 @@ public void start() {
                         .build();

         try {
-            rttDuration =
-                    reporter.reportSpans(prepareToSend(startFunctionSpan, false), MAX_REQUEST_SIZE);
+            rttDuration = reporter.reportSpans(prepareToSend(startFunctionSpan), MAX_REQUEST_SIZE);
         } catch (Throwable e) {
             Logger.error(e, "Failed to send start span");
         }
@@ -214,14 +225,10 @@ private void end(Span endFunctionSpan) throws IOException {
                 MAX_REQUEST_SIZE);
     }

-    public Span getStartFunctionSpan() {
-        return startFunctionSpan;
-    }
-
-    public List<Reportable> getAllCollectedSpans() {
-        List<Reportable> spans = new LinkedList<>();
+    public List<BaseSpan> getAllCollectedSpans() {
+        List<BaseSpan> spans = new LinkedList<>();
         spans.add(endFunctionSpan);
-        spans.addAll(httpSpans);
+        spans.addAll(this.spans);
         return spans;
     }

@@ -229,10 +236,6 @@ public Span getEndSpan() {
         return endFunctionSpan;
     }

-    public List<HttpSpan> getHttpSpans() {
-        return httpSpans;
-    }
-
     private String getStackTrace(Throwable throwable) {
         StringWriter sw = new StringWriter();
         PrintWriter pw = new PrintWriter(sw, true);
@@ -307,7 +310,7 @@ public void addHttpSpan(Long startTime, HttpUriRequest request, HttpResponse res
                                                 response.getStatusLine().getStatusCode())
                                         .build())
                         .build());
-        httpSpans.add(httpSpan);
+        this.spans.add(httpSpan);
     }

     public void addHttpSpan(Long startTime, Request<?> request, Response<?> response) {
@@ -366,7 +369,7 @@ public void addHttpSpan(Long startTime, Request<?> request, Response<?> response
                         .build());
         AwsSdkV1ParserFactory.getParser(request.getServiceName())
                 .safeParse(httpSpan, request, response);
-        httpSpans.add(httpSpan);
+        this.spans.add(httpSpan);
     }

     public void addHttpSpan(
@@ -435,7 +438,37 @@ public void addHttpSpan(
                         executionAttributes.getAttribute(SdkExecutionAttribute.SERVICE_NAME))
                 .safeParse(httpSpan, context);

-        httpSpans.add(httpSpan);
+        this.spans.add(httpSpan);
+    }
+
+    public <K, V> void addKafkaProduceSpan(
+            Long startTime,
+            Serializer<K> keySerializer,
+            Serializer<V> valueSerializer,
+            ProducerMetadata producerMetadata,
+            ProducerRecord<K, V> record,
+            RecordMetadata recordMetadata,
+            Exception exception) {
+        this.spans.add(
+                KafkaSpanFactory.createProduce(
+                        this.baseSpan,
+                        startTime,
+                        keySerializer,
+                        valueSerializer,
+                        producerMetadata,
+                        record,
+                        recordMetadata,
+                        exception));
+    }
+
+    public void addKafkaConsumeSpan(
+            Long startTime,
+            KafkaConsumer<?, ?> consumer,
+            ConsumerMetadata consumerMetadata,
+            ConsumerRecords<?, ?> consumerRecords) {
+        this.spans.add(
+                KafkaSpanFactory.createConsume(
+                        this.baseSpan, startTime, consumer, consumerMetadata, consumerRecords));
     }

     private static String extractHeaders(Map<String, String> headers) {
@@ -522,18 +555,18 @@ protected static <T> T callIfVerbose(Callable<T> method) {
         }
     }

-    private Reportable prepareToSend(Reportable span, boolean hasError) {
-        return reduceSpanSize(span.scrub(secretScrubber), hasError);
+    private BaseSpan prepareToSend(BaseSpan span) {
+        return reduceSpanSize(span.scrub(secretScrubber), false);
     }

-    private List<Reportable> prepareToSend(List<Reportable> spans, boolean hasError) {
-        for (Reportable span : spans) {
+    private List<BaseSpan> prepareToSend(List<BaseSpan> spans, boolean hasError) {
+        for (BaseSpan span : spans) {
             reduceSpanSize(span.scrub(secretScrubber), hasError);
         }
         return spans;
     }
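Note the ordering in `prepareToSend`: every span is scrubbed first and only then size-reduced, so the masked value — not the raw secret — is what gets measured against the field-size cap. A toy illustration of that ordering (not the tracer's actual `SecretScrubber`, which is regex-driven via `LUMIGO_SECRET_MASKING_REGEX`):

```java
import java.util.function.UnaryOperator;

public class PipelineSketch {
    public static void main(String[] args) {
        // stand-ins for scrub(...) and reduceSize(...)
        UnaryOperator<String> scrub =
                v -> v.replaceAll("\"pass\":\"[^\"]*\"", "\"pass\":\"****\"");
        UnaryOperator<String> reduce = v -> v.length() > 32 ? v.substring(0, 32) : v;

        String field = "{\"user\":\"a\",\"pass\":\"hunter2hunter2hunter2\"}";
        // scrub, then reduce — mirrors prepareToSend -> reduceSpanSize above
        System.out.println(reduce.apply(scrub.apply(field)));
    }
}
```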
-    public Reportable reduceSpanSize(Reportable span, boolean hasError) {
+    public BaseSpan reduceSpanSize(BaseSpan span, boolean hasError) {
         int maxFieldSize =
                 hasError
                         ? Configuration.getInstance().maxSpanFieldSizeWhenError()
diff --git a/src/main/java/io/lumigo/core/configuration/Configuration.java b/src/main/java/io/lumigo/core/configuration/Configuration.java
index ed1c3ae..9a4f1dd 100644
--- a/src/main/java/io/lumigo/core/configuration/Configuration.java
+++ b/src/main/java/io/lumigo/core/configuration/Configuration.java
@@ -28,6 +28,7 @@ public class Configuration {
     public static final String LUMIGO_MAX_SIZE_FOR_REQUEST = "LUMIGO_MAX_SIZE_FOR_REQUEST";
     public static final String LUMIGO_INSTRUMENTATION = "LUMIGO_INSTRUMENTATION";
     public static final String LUMIGO_SECRET_MASKING_REGEX = "LUMIGO_SECRET_MASKING_REGEX";
+    public static final String LUMIGO_MAX_BATCH_MESSAGE_IDS = "LUMIGO_MAX_BATCH_MESSAGE_IDS";

     private static Configuration instance;
     private LumigoConfiguration inlineConf;
@@ -137,4 +138,12 @@ public int maxRequestSize() {
                 LUMIGO_MAX_SIZE_FOR_REQUEST,
                 envUtil.getIntegerEnv(LUMIGO_MAX_RESPONSE_SIZE, 1024 * 500));
     }
+
+    public int maxBatchMessageIds() {
+        int value = envUtil.getIntegerEnv(LUMIGO_MAX_BATCH_MESSAGE_IDS, 20);
+        if (value == 0) {
+            value = 20;
+        }
+        return value;
+    }
 }
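For reference, the new cap's behavior in isolation — a minimal, self-contained sketch of the same fallback rule, assuming `EnvUtil.getIntegerEnv` returns the default when the variable is unset or unparsable:

```java
public class MaxBatchMessageIdsSketch {
    static int maxBatchMessageIds(String raw) {
        int value = 20; // default, as in Configuration above
        try {
            if (raw != null) value = Integer.parseInt(raw);
        } catch (NumberFormatException ignored) {
            // keep the default on garbage input
        }
        return value == 0 ? 20 : value; // explicit 0 also falls back to 20
    }

    public static void main(String[] args) {
        System.out.println(maxBatchMessageIds(System.getenv("LUMIGO_MAX_BATCH_MESSAGE_IDS")));
    }
}
```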
diff --git a/src/main/java/io/lumigo/core/instrumentation/agent/Loader.java b/src/main/java/io/lumigo/core/instrumentation/agent/Loader.java
index bfb17d8..77be428 100644
--- a/src/main/java/io/lumigo/core/instrumentation/agent/Loader.java
+++ b/src/main/java/io/lumigo/core/instrumentation/agent/Loader.java
@@ -3,9 +3,7 @@
 import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;
 import static net.bytebuddy.matcher.ElementMatchers.not;

-import io.lumigo.core.instrumentation.impl.AmazonHttpClientInstrumentation;
-import io.lumigo.core.instrumentation.impl.AmazonHttpClientV2Instrumentation;
-import io.lumigo.core.instrumentation.impl.ApacheHttpInstrumentation;
+import io.lumigo.core.instrumentation.impl.*;
 import net.bytebuddy.agent.builder.AgentBuilder;
 import org.pmw.tinylog.Logger;

@@ -17,6 +15,10 @@ public static void instrument(java.lang.instrument.Instrumentation inst) {
                 new AmazonHttpClientInstrumentation();
         AmazonHttpClientV2Instrumentation amazonHttpClientV2Instrumentation =
                 new AmazonHttpClientV2Instrumentation();
+        ApacheKafkaProducerInstrumentation apacheKafkaInstrumentation =
+                new ApacheKafkaProducerInstrumentation();
+        ApacheKafkaConsumerInstrumentation apacheKafkaConsumerInstrumentation =
+                new ApacheKafkaConsumerInstrumentation();
         AgentBuilder builder =
                 new AgentBuilder.Default()
                         .disableClassFormatChanges()
@@ -27,13 +29,28 @@ public static void instrument(java.lang.instrument.Instrumentation inst) {
                                         .and(
                                                 not(
                                                         nameStartsWith(
-                                                                "software.amazon.awssdk.core.client.builder.SdkDefaultClientBuilder")))
+                                                                AmazonHttpClientV2Instrumentation
+                                                                        .INSTRUMENTATION_PACKAGE_PREFIX))
+                                        .and(
+                                                not(
+                                                        nameStartsWith(
+                                                                ApacheKafkaProducerInstrumentation
+                                                                        .INSTRUMENTATION_PACKAGE_PREFIX)))
+                                        .and(
+                                                not(
+                                                        nameStartsWith(
+                                                                ApacheKafkaConsumerInstrumentation
+                                                                        .INSTRUMENTATION_PACKAGE_PREFIX))))
                         .type(apacheHttpInstrumentation.getTypeMatcher())
                         .transform(apacheHttpInstrumentation.getTransformer())
                         .type(amazonHttpClientInstrumentation.getTypeMatcher())
                         .transform(amazonHttpClientInstrumentation.getTransformer())
                         .type(amazonHttpClientV2Instrumentation.getTypeMatcher())
-                        .transform(amazonHttpClientV2Instrumentation.getTransformer());
+                        .transform(amazonHttpClientV2Instrumentation.getTransformer())
+                        .type(apacheKafkaInstrumentation.getTypeMatcher())
+                        .transform(apacheKafkaInstrumentation.getTransformer())
+                        .type(apacheKafkaConsumerInstrumentation.getTypeMatcher())
+                        .transform(apacheKafkaConsumerInstrumentation.getTransformer());

         builder.installOn(inst);
         Logger.debug("Finish Instrumentation");
diff --git a/src/main/java/io/lumigo/core/instrumentation/impl/AmazonHttpClientV2Instrumentation.java b/src/main/java/io/lumigo/core/instrumentation/impl/AmazonHttpClientV2Instrumentation.java
index e816487..03e2e14 100644
--- a/src/main/java/io/lumigo/core/instrumentation/impl/AmazonHttpClientV2Instrumentation.java
+++ b/src/main/java/io/lumigo/core/instrumentation/impl/AmazonHttpClientV2Instrumentation.java
@@ -18,9 +18,13 @@
 import software.amazon.awssdk.http.SdkHttpRequest;

 public class AmazonHttpClientV2Instrumentation implements LumigoInstrumentationApi {
+
+    public static final String INSTRUMENTATION_PACKAGE_PREFIX =
+            "software.amazon.awssdk.core.client.builder.SdkDefaultClientBuilder";
+
     @Override
     public ElementMatcher<TypeDescription> getTypeMatcher() {
-        return named("software.amazon.awssdk.core.client.builder.SdkDefaultClientBuilder");
+        return named(INSTRUMENTATION_PACKAGE_PREFIX);
     }

     @Override
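For readers unfamiliar with the Byte Buddy wiring above: each `LumigoInstrumentationApi` implementation contributes a type matcher and a `ForAdvice` transformer, and the agent chains them with `.type(...).transform(...)` before installing on the JVM's `Instrumentation`. A stripped-down, self-contained sketch of the same pattern — `DemoAdvice` and `com.example.Target` are placeholders, not tracer classes:

```java
import static net.bytebuddy.matcher.ElementMatchers.*;

import java.lang.instrument.Instrumentation;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;

public class AgentSketch {
    public static void premain(String args, Instrumentation inst) {
        new AgentBuilder.Default()
                .disableClassFormatChanges() // keeps advice retransformation-safe
                .type(named("com.example.Target"))
                .transform(
                        new AgentBuilder.Transformer.ForAdvice()
                                .advice(isMethod().and(named("handle")), DemoAdvice.class.getName()))
                .installOn(inst);
    }

    public static class DemoAdvice {
        @Advice.OnMethodEnter(suppress = Throwable.class)
        public static void enter() {
            System.out.println("entered");
        }
    }
}
```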
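The advice keys `poll` start times by the consumer's `clientId` field and bounds the map at 1000 entries. `LRUCache` is the tracer's own utility; a minimal stand-in with the same contract, assuming least-recently-used eviction (the real class may differ):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical stand-in for io.lumigo.core.utils.LRUCache: caps the
// clientId -> poll-start-time map so a long-lived consumer cannot grow
// it without bound.
public class LruSketch<K, V> extends LinkedHashMap<K, V> {
    private final int maxSize;

    public LruSketch(int maxSize) {
        super(16, 0.75f, true); // access-order = true -> LRU iteration order
        this.maxSize = maxSize;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > maxSize; // evict least-recently-used when over capacity
    }
}
```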
diff --git a/src/main/java/io/lumigo/core/instrumentation/impl/ApacheKafkaProducerInstrumentation.java b/src/main/java/io/lumigo/core/instrumentation/impl/ApacheKafkaProducerInstrumentation.java
new file mode 100644
index 0000000..c6b3b37
--- /dev/null
+++ b/src/main/java/io/lumigo/core/instrumentation/impl/ApacheKafkaProducerInstrumentation.java
@@ -0,0 +1,116 @@
+package io.lumigo.core.instrumentation.impl;
+
+import static net.bytebuddy.matcher.ElementMatchers.*;
+
+import io.lumigo.core.SpansContainer;
+import io.lumigo.core.instrumentation.LumigoInstrumentationApi;
+import io.lumigo.core.instrumentation.agent.Loader;
+import io.lumigo.models.KafkaSpan;
+import java.nio.charset.StandardCharsets;
+import java.util.UUID;
+import lombok.AllArgsConstructor;
+import net.bytebuddy.agent.builder.AgentBuilder;
+import net.bytebuddy.asm.Advice;
+import net.bytebuddy.description.type.TypeDescription;
+import net.bytebuddy.matcher.ElementMatcher;
+import org.apache.kafka.clients.producer.Callback;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.clients.producer.internals.ProducerMetadata;
+import org.apache.kafka.common.serialization.Serializer;
+import org.pmw.tinylog.Logger;
+
+public class ApacheKafkaProducerInstrumentation implements LumigoInstrumentationApi {
+
+    public static final String INSTRUMENTATION_PACKAGE_PREFIX = "org.apache.kafka.clients.producer";
+
+    @Override
+    public ElementMatcher<TypeDescription> getTypeMatcher() {
+        return named("org.apache.kafka.clients.producer.KafkaProducer");
+    }
+
+    @Override
+    public AgentBuilder.Transformer.ForAdvice getTransformer() {
+        return new AgentBuilder.Transformer.ForAdvice()
+                .include(Loader.class.getClassLoader())
+                .advice(
+                        isMethod()
+                                .and(isPublic())
+                                .and(named("send"))
+                                .and(
+                                        takesArgument(
+                                                        0,
+                                                        named(
+                                                                "org.apache.kafka.clients.producer.ProducerRecord"))
+                                                .and(
+                                                        takesArgument(
+                                                                1,
+                                                                named(
+                                                                        "org.apache.kafka.clients.producer.Callback")))),
+                        ApacheKafkaProducerAdvice.class.getName());
+    }
+
+    public static class ApacheKafkaProducerAdvice {
+        public static final SpansContainer spansContainer = SpansContainer.getInstance();
+
+        @Advice.OnMethodEnter
+        public static <K, V> void methodEnter(
+                @Advice.FieldValue("metadata") ProducerMetadata metadata,
+                @Advice.FieldValue("keySerializer") Serializer<K> keySerializer,
+                @Advice.FieldValue("valueSerializer") Serializer<V> valueSerializer,
+                @Advice.Argument(value = 0, readOnly = false) ProducerRecord<K, V> record,
+                @Advice.Argument(value = 1, readOnly = false) Callback callback) {
+            try {
+                callback =
+                        new KafkaProducerCallback<>(
+                                callback,
+                                keySerializer,
+                                valueSerializer,
+                                metadata,
+                                record,
+                                System.currentTimeMillis());
+
+                // Try to inject correlation id to the kafka record headers
+                record.headers()
+                        .add(
+                                KafkaSpan.LUMIGO_MESSAGE_ID_KEY,
+                                UUID.randomUUID()
+                                        .toString()
+                                        .substring(0, 10)
+                                        .getBytes(StandardCharsets.UTF_8));
+            } catch (Exception e) {
+                Logger.error(e);
+            }
+        }
+
+        @AllArgsConstructor
+        public static class KafkaProducerCallback<K, V> implements Callback {
+            private final Callback callback;
+            private final Serializer<K> keySerializer;
+            private final Serializer<V> valueSerializer;
+            private final ProducerMetadata producerMetadata;
+            private final ProducerRecord<K, V> record;
+            private final long startTime;
+
+            @Override
+            public void onCompletion(RecordMetadata recordMetadata, Exception exception) {
+                try {
+                    if (callback != null) {
+                        callback.onCompletion(recordMetadata, exception);
+                    }
+                    Logger.info("Handling kafka request {}", record.hashCode());
+                    spansContainer.addKafkaProduceSpan(
+                            startTime,
+                            keySerializer,
+                            valueSerializer,
+                            producerMetadata,
+                            record,
+                            recordMetadata,
+                            exception);
+                } catch (Throwable error) {
+                    Logger.error(error, "Failed to add kafka span");
+                }
+            }
+        }
+    }
+}
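The producer advice swaps the user's `Callback` for a wrapper that records the span once the broker acks, and injects a `lumigoMessageId` header before `send`. For illustration only, this is how the injected header can be read back on the consuming side with the plain kafka-clients API — the same `lastHeader` lookup the span factory uses:

```java
import java.nio.charset.StandardCharsets;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;

public class HeaderReadback {
    // Returns the most recently added "lumigoMessageId" header, or null.
    static String lumigoMessageId(ConsumerRecord<?, ?> record) {
        Header header = record.headers().lastHeader("lumigoMessageId");
        return header == null ? null : new String(header.value(), StandardCharsets.UTF_8);
    }
}
```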
diff --git a/src/main/java/io/lumigo/core/network/Reporter.java b/src/main/java/io/lumigo/core/network/Reporter.java
index 325efda..def3ed1 100644
--- a/src/main/java/io/lumigo/core/network/Reporter.java
+++ b/src/main/java/io/lumigo/core/network/Reporter.java
@@ -3,7 +3,7 @@
 import io.lumigo.core.configuration.Configuration;
 import io.lumigo.core.utils.JsonUtils;
 import io.lumigo.core.utils.StringUtils;
-import io.lumigo.models.Reportable;
+import io.lumigo.models.BaseSpan;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.LinkedList;
@@ -13,7 +13,7 @@

 public class Reporter {

-    private OkHttpClient client;
+    private final OkHttpClient client;

     public Reporter() {
         client =
@@ -22,11 +22,11 @@ public Reporter() {
                         .build();
     }

-    public long reportSpans(Reportable span, int maxSize) throws IOException {
+    public long reportSpans(BaseSpan span, int maxSize) throws IOException {
         return reportSpans(Collections.singletonList(span), maxSize);
     }

-    public long reportSpans(List<Reportable> spans, int maxSize) throws IOException {
+    public long reportSpans(List<BaseSpan> spans, int maxSize) throws IOException {
         long time = System.currentTimeMillis();
         List<String> spansAsStringList = new LinkedList<>();
         int sizeCount = 0;
@@ -44,7 +44,7 @@ public long reportSpans(List<BaseSpan> spans, int maxSize) throws IOException
             handledSpans++;
         }

-        if (Configuration.getInstance().isAwsEnvironment() && spansAsStringList.size() > 0) {
+        if (Configuration.getInstance().isAwsEnvironment() && !spansAsStringList.isEmpty()) {
             String spansAsString = "[" + String.join(",", spansAsStringList) + "]";
             Logger.debug("Reporting the spans: {}", spansAsString);
             RequestBody body =
diff --git a/src/main/java/io/lumigo/core/utils/AwsSdkV2Utils.java b/src/main/java/io/lumigo/core/utils/AwsSdkV2Utils.java
index 7432232..0fabacf 100644
--- a/src/main/java/io/lumigo/core/utils/AwsSdkV2Utils.java
+++ b/src/main/java/io/lumigo/core/utils/AwsSdkV2Utils.java
@@ -10,8 +10,7 @@
 @UtilityClass
 public class AwsSdkV2Utils {

-    public String calculateItemHash(
-            Map<String, AttributeValue> item) {
+    public String calculateItemHash(Map<String, AttributeValue> item) {
         Map<String, Object> simpleMap = AwsSdkV2Utils.convertAttributeMapToSimpleMap(item);
         return StringUtils.buildMd5Hash(JsonUtils.getObjectAsJsonString(simpleMap));
     }
diff --git a/src/main/java/io/lumigo/core/utils/AwsUtils.java b/src/main/java/io/lumigo/core/utils/AwsUtils.java
index 440a619..905b443 100644
--- a/src/main/java/io/lumigo/core/utils/AwsUtils.java
+++ b/src/main/java/io/lumigo/core/utils/AwsUtils.java
@@ -1,13 +1,13 @@
 package io.lumigo.core.utils;

-import static io.lumigo.core.utils.StringUtils.dynamodbItemToHash;
-
 import com.amazonaws.services.lambda.runtime.events.*;
+import com.amazonaws.services.lambda.runtime.events.models.dynamodb.AttributeValue;
 import com.fasterxml.jackson.annotation.JsonInclude;
+import io.lumigo.core.configuration.Configuration;
+import io.lumigo.models.KafkaSpan;
 import io.lumigo.models.Span;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -155,6 +155,37 @@ public static TriggeredBy extractTriggeredByFromEvent(Object event) {
             triggeredBy.setTriggeredBy("lex");
         } else if (event instanceof CognitoEvent) {
             triggeredBy.setTriggeredBy("cognito");
+        } else if (event instanceof KafkaEvent) {
+            triggeredBy.setTriggeredBy("kafka");
+            triggeredBy.setArn(((KafkaEvent) event).getEventSourceArn());
+            String topic = null;
+            List<String> messageIds = new ArrayList<>();
+            if (((KafkaEvent) event).getRecords() != null) {
+                for (Map.Entry<String, List<KafkaEvent.KafkaEventRecord>> entry :
+                        ((KafkaEvent) event).getRecords().entrySet()) {
+                    for (KafkaEvent.KafkaEventRecord record : entry.getValue()) {
+                        if (topic == null) {
+                            topic = record.getTopic();
+                        }
+                        for (Map<String, byte[]> headers : record.getHeaders()) {
+                            if (headers.containsKey(KafkaSpan.LUMIGO_MESSAGE_ID_KEY)) {
+                                messageIds.add(
+                                        new String(
+                                                headers.get(KafkaSpan.LUMIGO_MESSAGE_ID_KEY),
+                                                StandardCharsets.UTF_8));
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+            triggeredBy.setResource(topic);
+            triggeredBy.setMessageIds(
+                    messageIds.subList(
+                            0,
+                            Math.min(
+                                    messageIds.size(),
+                                    Configuration.getInstance().maxBatchMessageIds())));
         } else {
             Logger.info(
                     "Failed to found relevant triggered by found for event {} ",
@@ -282,10 +313,44 @@ private static String extractMessageIdFromDynamodbRecord(
             DynamodbEvent.DynamodbStreamRecord record) {
         if (record.getEventName() == null) return null;
         if (record.getEventName().equals("INSERT")) {
-            return dynamodbItemToHash(record.getDynamodb().getNewImage());
+            return calculateItemHash(record.getDynamodb().getNewImage());
         } else if (record.getEventName().equals("MODIFY")
                 || record.getEventName().equals("REMOVE")) {
-            return dynamodbItemToHash(record.getDynamodb().getKeys());
+            return calculateItemHash(record.getDynamodb().getKeys());
+        }
+        return null;
+    }
+
+    private static String calculateItemHash(Map<String, AttributeValue> item) {
+        Map<String, Object> simpleMap = convertAttributeMapToSimpleMap(item);
+        return StringUtils.buildMd5Hash(JsonUtils.getObjectAsJsonString(simpleMap));
+    }
+
+    private static Map<String, Object> convertAttributeMapToSimpleMap(
+            Map<String, AttributeValue> attributeValueMap) {
+        Map<String, Object> simpleMap = new HashMap<>();
+        attributeValueMap.forEach(
+                (key, value) -> simpleMap.put(key, attributeValueToObject(value)));
+        return simpleMap;
+    }
+
+    private static Object attributeValueToObject(AttributeValue value) {
+        if (value == null) {
+            return null;
+        } else if (value.getS() != null) {
+            return value.getS();
+        } else if (value.getN() != null) {
+            return value.getN();
+        } else if (value.getBOOL() != null) {
+            return value.getBOOL();
+        } else if (value.getL() != null && !value.getL().isEmpty()) {
+            List<Object> list = new ArrayList<>();
+            for (AttributeValue v : value.getL()) {
+                list.add(attributeValueToObject(v));
+            }
+            return list;
+        } else if (value.getM() != null && !value.getM().isEmpty()) {
+            return convertAttributeMapToSimpleMap(value.getM());
         }
         return null;
     }
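The `subList` cap above keeps huge Kafka batches from bloating the trigger metadata. A toy run of the same arithmetic, assuming the default cap of 20 from `maxBatchMessageIds()`:

```java
import java.util.ArrayList;
import java.util.List;

public class MessageIdCapSketch {
    public static void main(String[] args) {
        int maxBatchMessageIds = 20; // default when LUMIGO_MAX_BATCH_MESSAGE_IDS is unset
        List<String> messageIds = new ArrayList<>();
        for (int i = 0; i < 50; i++) messageIds.add("id-" + i);

        // only the first 20 correlation ids of a 50-record batch are reported
        List<String> reported =
                messageIds.subList(0, Math.min(messageIds.size(), maxBatchMessageIds));
        System.out.println(reported.size()); // 20
    }
}
```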
diff --git a/src/main/java/io/lumigo/models/BaseSpan.java b/src/main/java/io/lumigo/models/BaseSpan.java
new file mode 100644
index 0000000..857afe4
--- /dev/null
+++ b/src/main/java/io/lumigo/models/BaseSpan.java
@@ -0,0 +1,3 @@
+package io.lumigo.models;
+
+public interface BaseSpan extends Reportable<BaseSpan> {}
diff --git a/src/main/java/io/lumigo/models/HttpSpan.java b/src/main/java/io/lumigo/models/HttpSpan.java
index 91989d5..033e780 100644
--- a/src/main/java/io/lumigo/models/HttpSpan.java
+++ b/src/main/java/io/lumigo/models/HttpSpan.java
@@ -12,7 +12,7 @@
 @AllArgsConstructor
 @Builder(toBuilder = true)
 @Data(staticConstructor = "of")
-public class HttpSpan implements Reportable {
+public class HttpSpan implements BaseSpan {
     private Long started;
     private Long ended;
     private String id;
@@ -91,7 +91,7 @@ public static class HttpData {
     }

     @Override
-    public Reportable scrub(SecretScrubber scrubber) {
+    public BaseSpan scrub(SecretScrubber scrubber) {
         this.getInfo()
                 .getHttpInfo()
                 .getRequest()
@@ -121,7 +121,7 @@ public BaseSpan scrub(SecretScrubber scrubber) {
     }

     @Override
-    public Reportable reduceSize(int maxFieldSize) {
+    public BaseSpan reduceSize(int maxFieldSize) {
         this.getInfo()
                 .getHttpInfo()
                 .getRequest()
diff --git a/src/main/java/io/lumigo/models/KafkaSpan.java b/src/main/java/io/lumigo/models/KafkaSpan.java
new file mode 100644
index 0000000..e877d47
--- /dev/null
+++ b/src/main/java/io/lumigo/models/KafkaSpan.java
@@ -0,0 +1,208 @@
+package io.lumigo.models;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import io.lumigo.core.utils.SecretScrubber;
+import io.lumigo.core.utils.StringUtils;
+import java.util.*;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.Getter;
+
+@Getter
+@Builder(toBuilder = true)
+@AllArgsConstructor
+public class KafkaSpan implements BaseSpan {
+    public static final String LUMIGO_MESSAGE_ID_KEY = "lumigoMessageId";
+    public static final String KAFKA_PRODUCER_TYPE = "PRODUCER";
+    public static final String KAFKA_CONSUMER_TYPE = "CONSUMER";
+
+    private Long started;
+    private Long ended;
+    private String id;
+    private String type;
+    private String transactionId;
+    private String account;
+    private String region;
+    private String token;
+    private String parentId;
+    private Info info;
+
+    @Builder(toBuilder = true)
+    @Data
+    public static class Info {
+        private KafkaSpan.Tracer tracer;
+        private KafkaSpan.TraceId traceId;
+        private KafkaSpan.KafkaInfo kafkaInfo;
+        private String messageId;
+        private List<String> messageIds;
+        private String resourceName;
+        private String targetArn;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class TraceId {
+        @JsonProperty("Root")
+        private String root;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class Tracer {
+        private String version;
+    }
+
+    public interface KafkaInfo {}
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaProducerInfo implements KafkaInfo {
+        private String kafkaInfoType;
+        private String bootstrapServers;
+        private String topic;
+        private KafkaSpan.KafkaProducerRecord record;
+        private KafkaSpan.KafkaProducerResponse response;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaProducerRecord {
+        private String key;
+        private String value;
+        private String headers;
+    }
+
+    public interface KafkaProducerResponse {}
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaProducerSuccessResponse implements KafkaProducerResponse {
+        private Integer partition;
+        private Long offset;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaProducerErrorResponse implements KafkaProducerResponse {
+        private String errorMessage;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaConsumerInfo implements KafkaInfo {
+        private String kafkaInfoType;
+        private List<String> bootstrapServers;
+        private String consumerGroupId;
+        private Integer recordsCount;
+        private List<String> topics;
+        private List<KafkaConsumerRecord> records;
+    }
+
+    @AllArgsConstructor
+    @Builder(toBuilder = true)
+    @Data
+    public static class KafkaConsumerRecord {
+        private String topic;
+        private Integer partition;
+        private Long offset;
+        private String key;
+        private String value;
+        private String headers;
+    }
+
+    @Override
+    public BaseSpan scrub(SecretScrubber scrubber) {
+        if (this.info.kafkaInfo instanceof KafkaProducerInfo) {
+            KafkaProducerInfo kafkaProducerInfo = (KafkaProducerInfo) this.info.kafkaInfo;
+            if (kafkaProducerInfo.getRecord() != null) {
+                kafkaProducerInfo
+                        .getRecord()
+                        .setKey(
+                                scrubber.scrubStringifiedObject(
+                                        kafkaProducerInfo.getRecord().getKey()));
+                kafkaProducerInfo
+                        .getRecord()
+                        .setValue(
+                                scrubber.scrubStringifiedObject(
+                                        kafkaProducerInfo.getRecord().getValue()));
+                kafkaProducerInfo
+                        .getRecord()
+                        .setHeaders(
+                                scrubber.scrubStringifiedObject(
+                                        kafkaProducerInfo.getRecord().getHeaders()));
+            }
+            if (kafkaProducerInfo.getResponse() instanceof KafkaProducerErrorResponse) {
+                KafkaProducerErrorResponse kafkaProducerErrorResponse =
+                        (KafkaProducerErrorResponse) kafkaProducerInfo.getResponse();
+                kafkaProducerErrorResponse.setErrorMessage(
+                        scrubber.scrubStringifiedObject(
+                                kafkaProducerErrorResponse.getErrorMessage()));
+            }
+        } else if (this.info.kafkaInfo instanceof KafkaConsumerInfo) {
+            KafkaConsumerInfo kafkaConsumerInfo = (KafkaConsumerInfo) this.info.kafkaInfo;
+            if (kafkaConsumerInfo.getRecords() != null) {
+                for (KafkaConsumerRecord record : kafkaConsumerInfo.getRecords()) {
+                    record.setKey(scrubber.scrubStringifiedObject(record.getKey()));
+                    record.setValue(scrubber.scrubStringifiedObject(record.getValue()));
+                    record.setHeaders(scrubber.scrubStringifiedObject(record.getHeaders()));
+                }
+            }
+        }
+        return this;
+    }
+
+    @Override
+    public BaseSpan reduceSize(int maxFieldSize) {
+        if (this.info.kafkaInfo instanceof KafkaProducerInfo) {
+            KafkaProducerInfo kafkaProducerInfo = (KafkaProducerInfo) this.info.kafkaInfo;
+            kafkaProducerInfo.setBootstrapServers(
+                    StringUtils.getMaxSizeString(
+                            kafkaProducerInfo.getBootstrapServers(), maxFieldSize));
+            kafkaProducerInfo.setTopic(
+                    StringUtils.getMaxSizeString(kafkaProducerInfo.getTopic(), maxFieldSize));
+            if (kafkaProducerInfo.getRecord() != null) {
+                kafkaProducerInfo
+                        .getRecord()
+                        .setKey(
+                                StringUtils.getMaxSizeString(
+                                        kafkaProducerInfo.getRecord().getKey(), maxFieldSize));
+                kafkaProducerInfo
+                        .getRecord()
+                        .setValue(
+                                StringUtils.getMaxSizeString(
+                                        kafkaProducerInfo.getRecord().getValue(), maxFieldSize));
+                kafkaProducerInfo
+                        .getRecord()
+                        .setHeaders(
+                                StringUtils.getMaxSizeString(
+                                        kafkaProducerInfo.getRecord().getHeaders(), maxFieldSize));
+            }
+            if (kafkaProducerInfo.getResponse() instanceof KafkaProducerErrorResponse) {
+                KafkaProducerErrorResponse kafkaProducerErrorResponse =
+                        (KafkaProducerErrorResponse) kafkaProducerInfo.getResponse();
+                kafkaProducerErrorResponse.setErrorMessage(
+                        StringUtils.getMaxSizeString(
+                                kafkaProducerErrorResponse.getErrorMessage(), maxFieldSize));
+            }
+        } else if (this.info.kafkaInfo instanceof KafkaConsumerInfo) {
+            KafkaConsumerInfo kafkaConsumerInfo = (KafkaConsumerInfo) this.info.kafkaInfo;
+            if (kafkaConsumerInfo.getRecords() != null) {
+                for (KafkaConsumerRecord record : kafkaConsumerInfo.getRecords()) {
+                    record.setKey(StringUtils.getMaxSizeString(record.getKey(), maxFieldSize));
+                    record.setValue(StringUtils.getMaxSizeString(record.getValue(), maxFieldSize));
+                    record.setHeaders(
+                            StringUtils.getMaxSizeString(record.getHeaders(), maxFieldSize));
+                }
+            }
+        }
+        return this;
+    }
+}
diff --git a/src/main/java/io/lumigo/models/KafkaSpanFactory.java b/src/main/java/io/lumigo/models/KafkaSpanFactory.java
new file mode 100644
index 0000000..24e07dc
--- /dev/null
+++ b/src/main/java/io/lumigo/models/KafkaSpanFactory.java
@@ -0,0 +1,188 @@
+package io.lumigo.models;
+
+import static io.lumigo.core.SpansContainer.KAFKA_SPAN_TYPE;
+
+import io.lumigo.core.utils.JsonUtils;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.stream.Collectors;
+import lombok.experimental.UtilityClass;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.clients.producer.internals.ProducerMetadata;
+import org.apache.kafka.common.header.Header;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.serialization.Serializer;
+
+@UtilityClass
+public class KafkaSpanFactory {
+    public static <K, V> KafkaSpan createProduce(
+            Span baseSpan,
+            Long startTime,
+            Serializer<K> keySerializer,
+            Serializer<V> valueSerializer,
+            ProducerMetadata producerMetadata,
+            ProducerRecord<K, V> record,
+            RecordMetadata recordMetadata,
+            Exception exception) {
+        List<String> bootstrapServers =
+                producerMetadata.fetch().nodes().stream()
+                        .map(node -> node.host() + ":" + node.port())
+                        .collect(Collectors.toList());
+        String topic = record.topic();
+        KafkaSpan.KafkaProducerRecord producerRecord =
+                KafkaSpan.KafkaProducerRecord.builder()
+                        .key(
+                                new String(
+                                                keySerializer.serialize(
+                                                        record.topic(),
+                                                        record.headers(),
+                                                        record.key()),
+                                                Charset.defaultCharset())
+                                        .trim())
+                        .value(
+                                new String(
+                                                valueSerializer.serialize(
+                                                        record.topic(),
+                                                        record.headers(),
+                                                        record.value()),
+                                                Charset.defaultCharset())
+                                        .trim())
+                        .headers(extractHeaders(record.headers()))
+                        .build();
+
+        KafkaSpan.KafkaProducerResponse response;
+        if (exception == null) {
+            response =
+                    KafkaSpan.KafkaProducerSuccessResponse.builder()
+                            .partition(recordMetadata.partition())
+                            .offset(recordMetadata.offset())
+                            .build();
+        } else {
+            response =
+                    KafkaSpan.KafkaProducerErrorResponse.builder()
+                            .errorMessage(exception.getMessage())
+                            .build();
+        }
+
+        Header messageIdHeader = record.headers().lastHeader(KafkaSpan.LUMIGO_MESSAGE_ID_KEY);
+        String messageId =
+                messageIdHeader == null
+                        ? null
+                        : new String(messageIdHeader.value(), StandardCharsets.UTF_8);
+
+        return new KafkaSpan.KafkaSpanBuilder()
+                .id(UUID.randomUUID().toString())
+                .started(startTime)
+                .ended(System.currentTimeMillis())
+                .type(KAFKA_SPAN_TYPE)
+                .transactionId(baseSpan.getTransactionId())
+                .account(baseSpan.getAccount())
+                .region(baseSpan.getRegion())
+                .token(baseSpan.getToken())
+                .parentId(baseSpan.getId())
+                .info(
+                        KafkaSpan.Info.builder()
+                                .tracer(
+                                        KafkaSpan.Tracer.builder()
+                                                .version(
+                                                        baseSpan.getInfo().getTracer().getVersion())
+                                                .build())
+                                .traceId(
+                                        KafkaSpan.TraceId.builder()
+                                                .root(baseSpan.getInfo().getTraceId().getRoot())
+                                                .build())
+                                .messageId(messageId)
+                                .kafkaInfo(
+                                        KafkaSpan.KafkaProducerInfo.builder()
+                                                .kafkaInfoType(KafkaSpan.KAFKA_PRODUCER_TYPE)
+                                                .bootstrapServers(
+                                                        JsonUtils.getObjectAsJsonString(
+                                                                bootstrapServers))
+                                                .topic(topic)
+                                                .record(producerRecord)
+                                                .response(response)
+                                                .build())
+                                .build())
+                .build();
+    }
+
+    public static KafkaSpan createConsume(
+            Span baseSpan,
+            Long startTime,
+            KafkaConsumer<?, ?> consumer,
+            ConsumerMetadata consumerMetadata,
+            ConsumerRecords<?, ?> consumerRecords) {
+        List<String> messageIds = new ArrayList<>();
+        List<String> bootstrapServers =
+                consumerMetadata.fetch().nodes().stream()
+                        .map(node -> node.host() + ":" + node.port())
+                        .collect(Collectors.toList());
+        List<String> topics = new ArrayList<>(consumer.subscription());
+        List<KafkaSpan.KafkaConsumerRecord> records = new ArrayList<>();
+        consumerRecords.forEach(
+                record -> {
+                    Header messageIdHeader =
+                            record.headers().lastHeader(KafkaSpan.LUMIGO_MESSAGE_ID_KEY);
+                    String messageId =
+                            messageIdHeader == null
+                                    ? null
+                                    : new String(messageIdHeader.value(), StandardCharsets.UTF_8);
+                    if (messageId != null) {
+                        messageIds.add(messageId);
+                    }
+                    records.add(
+                            KafkaSpan.KafkaConsumerRecord.builder()
+                                    .topic(record.topic())
+                                    .partition(record.partition())
+                                    .offset(record.offset())
+                                    .key(record.key().toString())
+                                    .value(record.value().toString())
+                                    .headers(extractHeaders(record.headers()))
+                                    .build());
+                });
+        return KafkaSpan.builder()
+                .id(UUID.randomUUID().toString())
+                .started(startTime)
+                .ended(System.currentTimeMillis())
+                .type(KAFKA_SPAN_TYPE)
+                .transactionId(baseSpan.getTransactionId())
+                .account(baseSpan.getAccount())
+                .region(baseSpan.getRegion())
+                .token(baseSpan.getToken())
+                .parentId(baseSpan.getId())
+                .info(
+                        KafkaSpan.Info.builder()
+                                .tracer(
+                                        KafkaSpan.Tracer.builder()
+                                                .version(
+                                                        baseSpan.getInfo().getTracer().getVersion())
+                                                .build())
+                                .traceId(
+                                        KafkaSpan.TraceId.builder()
+                                                .root(baseSpan.getInfo().getTraceId().getRoot())
+                                                .build())
+                                .messageIds(messageIds)
+                                .kafkaInfo(
+                                        KafkaSpan.KafkaConsumerInfo.builder()
+                                                .kafkaInfoType(KafkaSpan.KAFKA_CONSUMER_TYPE)
+                                                .bootstrapServers(bootstrapServers)
+                                                .consumerGroupId(consumer.groupMetadata().groupId())
+                                                .topics(topics)
+                                                .recordsCount(consumerRecords.count())
+                                                .records(records)
+                                                .build())
+                                .build())
+                .build();
+    }
+
+    private static String extractHeaders(Headers headers) {
+        return JsonUtils.getObjectAsJsonString(
+                Arrays.stream(headers.toArray())
+                        .collect(Collectors.toMap(Header::key, Header::value)));
+    }
+}
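One subtlety `extractHeaders` relies on: it maps header keys to their raw `byte[]` values, and Jackson serializes byte arrays as Base64 strings. That is why the tests below expect `{"lumigoMessageId":"MTIz"}` for a header whose value is the bytes of `"123"`. A self-contained check, assuming jackson-databind on the classpath:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;

public class HeaderJsonSketch {
    public static void main(String[] args) throws Exception {
        // byte[] values are Base64-encoded by Jackson's default serializer
        String json =
                new ObjectMapper()
                        .writeValueAsString(
                                Collections.singletonMap("lumigoMessageId", "123".getBytes()));
        System.out.println(json); // {"lumigoMessageId":"MTIz"}
    }
}
```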
diff --git a/src/main/java/io/lumigo/models/Reportable.java b/src/main/java/io/lumigo/models/Reportable.java
index b73845c..fafd1eb 100644
--- a/src/main/java/io/lumigo/models/Reportable.java
+++ b/src/main/java/io/lumigo/models/Reportable.java
@@ -2,8 +2,8 @@

 import io.lumigo.core.utils.SecretScrubber;

-public interface Reportable {
-    public Reportable scrub(SecretScrubber scrubber);
+public interface Reportable<T> {
+    T scrub(SecretScrubber scrubber);

-    public Reportable reduceSize(int maxFieldSize);
+    T reduceSize(int maxFieldSize);
 }
diff --git a/src/main/java/io/lumigo/models/Span.java b/src/main/java/io/lumigo/models/Span.java
index cb409da..f151184 100644
--- a/src/main/java/io/lumigo/models/Span.java
+++ b/src/main/java/io/lumigo/models/Span.java
@@ -14,7 +14,7 @@
 @AllArgsConstructor
 @Builder(toBuilder = true)
 @Data(staticConstructor = "of")
-public class Span implements Reportable {
+public class Span implements BaseSpan {
     private String name;
     private long started;
     private long ended;
@@ -90,24 +90,22 @@ public String toString() {
     }

     @Override
-    public Reportable scrub(SecretScrubber scrubber) {
+    public Span scrub(SecretScrubber scrubber) {
         this.setEnvs(
                 JsonUtils.getObjectAsJsonString(scrubber.scrubStringifiedObject(this.getEnvs())));
         this.setEvent(
                 JsonUtils.getObjectAsJsonString(scrubber.scrubStringifiedObject(this.getEvent())));
         this.setReturn_value(scrubber.scrubStringifiedObject(this.getReturn_value()));
-
         return this;
     }

     @Override
-    public Reportable reduceSize(int maxFieldSize) {
+    public Span reduceSize(int maxFieldSize) {
         this.setEnvs(
                 StringUtils.getMaxSizeString(
                         this.getEnvs(), Configuration.getInstance().maxSpanFieldSize()));
         this.setReturn_value(StringUtils.getMaxSizeString(this.getReturn_value(), maxFieldSize));
         this.setEvent(StringUtils.getMaxSizeString(this.getEvent(), maxFieldSize));
-
         return this;
     }
 }
diff --git a/src/test/java/infa/AwsLambdaEventGenerator.java b/src/test/java/infa/AwsLambdaEventGenerator.java
index ba7256c..59ff2d1 100644
--- a/src/test/java/infa/AwsLambdaEventGenerator.java
+++ b/src/test/java/infa/AwsLambdaEventGenerator.java
@@ -3,10 +3,10 @@
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

-import com.amazonaws.services.dynamodbv2.model.AttributeValue;
-import com.amazonaws.services.dynamodbv2.model.StreamRecord;
 import com.amazonaws.services.lambda.runtime.events.*;
-import com.amazonaws.services.s3.event.S3EventNotification;
+import com.amazonaws.services.lambda.runtime.events.models.dynamodb.AttributeValue;
+import com.amazonaws.services.lambda.runtime.events.models.dynamodb.StreamRecord;
+import com.amazonaws.services.lambda.runtime.events.models.s3.*;
 import com.sun.tools.javac.util.List;
 import java.util.Collections;
 import java.util.Date;
@@ -14,7 +14,6 @@
 import java.util.Map;

 public class AwsLambdaEventGenerator {
-
     public S3Event s3Event() {
         S3Event s3Event = mock(S3Event.class);
         S3EventNotification.S3Entity s3Entity = mock(S3EventNotification.S3Entity.class);
@@ -133,8 +132,7 @@ public APIGatewayProxyRequestEvent apiGatewayProxyRequestEvent() {
     }

     public CloudWatchLogsEvent cloudWatchLogsEvent() {
-        CloudWatchLogsEvent cloudWatchLogsEvent = mock(CloudWatchLogsEvent.class);
-        return cloudWatchLogsEvent;
+        return mock(CloudWatchLogsEvent.class);
     }

     public ScheduledEvent scheduledEvent() {
@@ -144,22 +142,42 @@ public ScheduledEvent scheduledEvent() {
     }

     public CloudFrontEvent cloudFrontEvent() {
-        CloudFrontEvent event = mock(CloudFrontEvent.class);
-        return event;
+        return mock(CloudFrontEvent.class);
     }

     public CodeCommitEvent codeCommitEvent() {
-        CodeCommitEvent event = mock(CodeCommitEvent.class);
-        return event;
+        return mock(CodeCommitEvent.class);
     }

     public LexEvent lexEvent() {
-        LexEvent event = mock(LexEvent.class);
-        return event;
+        return mock(LexEvent.class);
     }

     public CognitoEvent cognitoEvent() {
-        CognitoEvent event = mock(CognitoEvent.class);
-        return event;
+        return mock(CognitoEvent.class);
+    }
+
+    public KafkaEvent kafkaEvent() {
+        return new KafkaEvent(
+                Collections.singletonMap(
+                        "msk-topic-0",
+                        Collections.singletonList(
+                                KafkaEvent.KafkaEventRecord.builder()
+                                        .withTopic("msk-topic")
+                                        .withPartition(0)
+                                        .withOffset(1612)
+                                        .withTimestamp(1716147063028L)
+                                        .withTimestampType("CREATE_TIME")
+                                        .withKey("a2V5")
+                                        .withValue("TWVzc2FnZSBhdCAxNzE2MTQ3MDYzMDE1")
+                                        .withHeaders(
+                                                Collections.singletonList(
+                                                        Collections.singletonMap(
+                                                                "lumigoMessageId",
+                                                                "NjA1ZTU0YWItMA==".getBytes())))
+                                        .build())),
+                "aws:kafka",
+                "arn:aws:kafka:us-west-2:779055952581:cluster/MskLambdaStackCluster/8fff24de-4f6c-44ca-b072-61d7a1b450a4-2",
+                "b-2.msklambdastackcluster.29ysmg.c2.kafka.us-west-2.amazonaws.com:9098,b-1.msklambdastackcluster.29ysmg.c2.kafka.us-west-2.amazonaws.com:9098");
     }
 }
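A side note on the fixture values: MSK delivers record keys, values, and header bytes Base64-encoded, so the literals above decode to plain text — `"NjA1ZTU0YWItMA=="` is `"605e54ab-0"`, a ten-character message id of the kind the producer advice injects. A quick decode:

```java
import java.util.Base64;

public class FixtureDecode {
    public static void main(String[] args) {
        System.out.println(new String(Base64.getDecoder().decode("a2V5"))); // key
        System.out.println(
                new String(Base64.getDecoder().decode("TWVzc2FnZSBhdCAxNzE2MTQ3MDYzMDE1")));
        // Message at 1716147063015
        System.out.println(new String(Base64.getDecoder().decode("NjA1ZTU0YWItMA==")));
        // 605e54ab-0
    }
}
```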
diff --git a/src/test/java/io/lumigo/core/SpansContainerTest.java b/src/test/java/io/lumigo/core/SpansContainerTest.java
index f83c9df..1b8bc28 100644
--- a/src/test/java/io/lumigo/core/SpansContainerTest.java
+++ b/src/test/java/io/lumigo/core/SpansContainerTest.java
@@ -353,7 +353,7 @@ void add_http_span() throws Exception {
         // Trigger scrubbing
         spansContainer.end();

-        HttpSpan actualSpan = spansContainer.getHttpSpans().get(0);
+        HttpSpan actualSpan = (HttpSpan) spansContainer.getSpans().get(0);
         String expectedSpan =
                 "{\n"
                         + "   \"started\":1559127760071,\n"
@@ -421,7 +421,7 @@ void add_aws_http_span_with_spnid_from_header_amzn() throws Exception {
         // Applies scrubbing
         spansContainer.end();

-        HttpSpan actualSpan = spansContainer.getHttpSpans().get(0);
+        HttpSpan actualSpan = (HttpSpan) spansContainer.getSpans().get(0);
         String expectedSpan =
                 "{\n"
                         + "   \"started\":1559127760071,\n"
@@ -487,7 +487,7 @@ void add_aws_http_span_with_spnid_from_header_amz() throws Exception {
         long startTime = System.currentTimeMillis();
         spansContainer.addHttpSpan(startTime, awsRequest, awsResponse);

-        HttpSpan actualSpan = spansContainer.getHttpSpans().get(0);
+        HttpSpan actualSpan = (HttpSpan) spansContainer.getSpans().get(0);
         String expectedSpan =
                 "{\n"
                         + "   \"started\":1559127760071,\n"
@@ -577,7 +577,7 @@ void add_aws_sdk_v2_http_span() throws Exception {
         // Triggers scrubbing
         spansContainer.end();

-        HttpSpan actualSpan = spansContainer.getHttpSpans().get(0);
+        HttpSpan actualSpan = (HttpSpan) spansContainer.getSpans().get(0);
         String expectedSpan =
                 "{\n"
                         + "   \"started\":1559127760071,\n"
diff --git a/src/test/java/io/lumigo/core/instrumentation/impl/ApacheHttpInstrumentationTest.java b/src/test/java/io/lumigo/core/instrumentation/impl/ApacheHttpInstrumentationTest.java
index 96d2925..0a583ae 100644
--- a/src/test/java/io/lumigo/core/instrumentation/impl/ApacheHttpInstrumentationTest.java
+++ b/src/test/java/io/lumigo/core/instrumentation/impl/ApacheHttpInstrumentationTest.java
@@ -92,7 +92,7 @@ public void handling_exit_response_create_new_span() throws Exception {

         ApacheHttpInstrumentation.ApacheHttpAdvice.methodExit(request, response);

-        assertEquals(1, SpansContainer.getInstance().getHttpSpans().size());
+        assertEquals(1, SpansContainer.getInstance().getSpans().size());
         assertNotNull(ApacheHttpInstrumentation.ApacheHttpAdvice.handled.get(request.hashCode()));
     }
diff --git a/src/test/java/io/lumigo/core/utils/AwsUtilsTest.java b/src/test/java/io/lumigo/core/utils/AwsUtilsTest.java
index 753b85d..86e0031 100644
--- a/src/test/java/io/lumigo/core/utils/AwsUtilsTest.java
+++ b/src/test/java/io/lumigo/core/utils/AwsUtilsTest.java
@@ -249,6 +249,21 @@ void test_extractTriggeredByFromEvent_scheduledEvent() throws JSONException {
                 true);
     }

+
+    @Test
+    void test_extractTriggeredByFromEvent_KafkaEvent() throws JSONException {
+        Object event = awsLambdaEventGenerator.kafkaEvent();
+
+        JSONAssert.assertEquals(
+                "{"
+                        + "\"triggeredBy\": \"kafka\", "
+                        + "\"arn\": \"arn:aws:kafka:us-west-2:779055952581:cluster/MskLambdaStackCluster/8fff24de-4f6c-44ca-b072-61d7a1b450a4-2\", "
+                        + "\"resource\": \"msk-topic\", "
+                        + "\"messageIds\": [\"NjA1ZTU0YWItMA==\"]"
+                        + "}",
+                JsonUtils.getObjectAsJsonString(AwsUtils.extractTriggeredByFromEvent(event)),
+                true);
+    }
+
     @ParameterizedTest
     @CsvSource({
         "1.5.0, 5",
diff --git a/src/test/java/io/lumigo/handlers/LumigoRequestHandlerTest.java b/src/test/java/io/lumigo/handlers/LumigoRequestHandlerTest.java
index 58910cb..8fdf586 100644
--- a/src/test/java/io/lumigo/handlers/LumigoRequestHandlerTest.java
+++ b/src/test/java/io/lumigo/handlers/LumigoRequestHandlerTest.java
@@ -15,7 +15,7 @@
 import io.lumigo.core.network.Reporter;
 import io.lumigo.core.utils.EnvUtil;
 import io.lumigo.core.utils.JsonUtils;
-import io.lumigo.models.Reportable;
+import io.lumigo.models.BaseSpan;
 import io.lumigo.models.Span;
 import io.lumigo.testUtils.JsonTestUtils;
 import java.io.IOException;
@@ -453,10 +453,8 @@ public void LumigoRequestStreamHandler_happy_flow_response() throws Exception {

         handler.handleRequest(null, null, context);

-        ArgumentCaptor<List<Reportable>> argumentCaptorAllSpans =
-                ArgumentCaptor.forClass(List.class);
-        ArgumentCaptor<Reportable> argumentCaptorStartSpan =
-                ArgumentCaptor.forClass(Reportable.class);
+        ArgumentCaptor<List<BaseSpan>> argumentCaptorAllSpans = ArgumentCaptor.forClass(List.class);
+        ArgumentCaptor<BaseSpan> argumentCaptorStartSpan = ArgumentCaptor.forClass(BaseSpan.class);

         verify(reporter, Mockito.times(1)).reportSpans(argumentCaptorStartSpan.capture(), anyInt());
         verify(reporter, Mockito.times(1)).reportSpans(argumentCaptorAllSpans.capture(), anyInt());
diff --git a/src/test/java/io/lumigo/models/KafkaSpanFactoryTest.java b/src/test/java/io/lumigo/models/KafkaSpanFactoryTest.java
new file mode 100644
index 0000000..1695fae
--- /dev/null
+++ b/src/test/java/io/lumigo/models/KafkaSpanFactoryTest.java
@@ -0,0 +1,280 @@
+package io.lumigo.models;
+
+import static org.mockito.Mockito.when;
+
+import io.lumigo.core.utils.JsonUtils;
+import java.util.Collections;
+import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.clients.producer.internals.ProducerMetadata;
+import org.apache.kafka.common.Cluster;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.skyscreamer.jsonassert.Customization;
+import org.skyscreamer.jsonassert.JSONAssert;
+import org.skyscreamer.jsonassert.JSONCompareMode;
+import org.skyscreamer.jsonassert.comparator.CustomComparator;
+
+public class KafkaSpanFactoryTest {
+    private static final Long NOW = System.currentTimeMillis();
+    private static final String BOOTSTRAP_SERVERS = "bootstrap-servers";
+    private static final int PORT = 9092;
+    private static final String TOPIC = "topic";
+    private static final int PARTITION = 1;
+    private static final long OFFSET = 12345L;
+    private static final String GROUP_ID = "groupId";
+    private Span baseSpan;
+    private RecordMetadata recordMetadata;
+    @Mock private ProducerMetadata producerMetadata;
+    @Mock private KafkaConsumer<String, String> consumer;
+    @Mock private ConsumerMetadata consumerMetadata;
+
+    @BeforeEach
+    public void setup() {
+        MockitoAnnotations.initMocks(this);
+        baseSpan =
+                Span.builder()
+                        .started(1L)
+                        .ended(2L)
+                        .info(
+                                Span.Info.builder()
+                                        .tracer(Span.Tracer.builder().version("1.0").build())
+                                        .traceId(Span.TraceId.builder().root("1-2-3").build())
+                                        .approxEventCreationTime(12)
+                                        .build())
+                        .build();
+        recordMetadata =
+                new RecordMetadata(new TopicPartition(TOPIC, PARTITION), OFFSET, 0, 0, 0L, 0, 0);
+        Cluster cluster =
+                new Cluster(
+                        "clusterId",
+                        Collections.singletonList(new Node(0, BOOTSTRAP_SERVERS, PORT)),
+                        Collections.emptySet(),
+                        Collections.emptySet(),
+                        Collections.emptySet(),
+                        new Node(0, BOOTSTRAP_SERVERS, PORT));
+        when(producerMetadata.fetch()).thenReturn(cluster);
+        when(consumerMetadata.fetch()).thenReturn(cluster);
+        when(consumer.subscription()).thenReturn(Collections.singleton(TOPIC));
+        when(consumer.groupMetadata()).thenReturn(new ConsumerGroupMetadata(GROUP_ID));
+    }
+
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testCreateProduceSpan(boolean injectHeaders) throws Exception {
+        ProducerRecord<String, String> producerRecord;
+        String messageId;
+        String headers;
+        if (injectHeaders) {
+            producerRecord =
+                    new ProducerRecord<>(
+                            "topic",
+                            PARTITION,
+                            "key",
+                            "value",
+                            Collections.singletonList(
+                                    new RecordHeader("lumigoMessageId", "123".getBytes())));
+            messageId = "\"123\"";
+            headers = "'{\"lumigoMessageId\":\"MTIz\"}'";
+        } else {
+            producerRecord = new ProducerRecord<>("topic", PARTITION, "key", "value");
+            messageId = null;
+            headers = "'{}'";
+        }
+
+        KafkaSpan result =
+                KafkaSpanFactory.createProduce(
+                        baseSpan,
+                        NOW,
+                        new StringSerializer(),
+                        new StringSerializer(),
+                        producerMetadata,
+                        producerRecord,
+                        recordMetadata,
+                        null);
+
+        String expectedSpan =
+                "{"
+                        + "\"started\":1716208357697,"
+                        + "\"ended\":1716208358649,"
+                        + "\"id\":\"9746e565-9f1a-4b9c-92f7-9a63337e1193\","
+                        + "\"type\":\"kafka\","
+                        + "\"transactionId\":null,"
+                        + "\"account\":null,"
+                        + "\"region\":null,"
+                        + "\"token\":null,"
+                        + "\"parentId\":null,"
+                        + "\"info\":"
+                        + "   {"
+                        + "      \"tracer\": {\"version\":\"1.0\"},"
+                        + "      \"traceId\":{\"Root\":\"1-2-3\"},"
+                        + "      \"kafkaInfo\":"
+                        + "         {"
+                        + "            \"bootstrapServers\": '[\"bootstrap-servers:9092\"]',"
+                        + "            \"topic\":\"topic\","
+                        + "            \"record\": {\"key\":\"key\",\"value\":\"value\",\"headers\":"
+                        + headers
+                        + "},"
+                        + "            \"response\":{\"partition\":1,\"offset\":12345}"
+                        + "         },"
+                        + "      \"messageId\":"
+                        + messageId
+                        + ","
+                        + "      \"messageIds\":null,"
+                        + "      \"resourceName\":null,"
+                        + "      \"targetArn\":null"
+                        + "}"
+                        + "}";
+        JSONAssert.assertEquals(
+                expectedSpan,
+                JsonUtils.getObjectAsJsonString(result),
+                new CustomComparator(
+                        JSONCompareMode.LENIENT,
+                        new Customization("info.tracer.version", (o1, o2) -> o1 != null),
+                        new Customization("id", (o1, o2) -> o1 != null),
+                        new Customization("started", (o1, o2) -> o1 != null),
+                        new Customization("ended", (o1, o2) -> o1 != null)));
+    }
+
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testCreateProduceSpanWithError(boolean injectHeaders) throws Exception {
+        ProducerRecord<String, String> producerRecord;
+        String messageId;
+        String headers;
+        if (injectHeaders) {
+            producerRecord =
+                    new ProducerRecord<>(
+                            "topic",
+                            PARTITION,
+                            "key",
+                            "value",
+                            Collections.singletonList(
+                                    new RecordHeader("lumigoMessageId", "123".getBytes())));
+            messageId = "\"123\"";
+            headers = "'{\"lumigoMessageId\":\"MTIz\"}'";
+        } else {
+            producerRecord = new ProducerRecord<>("topic", PARTITION, "key", "value");
+            messageId = null;
+            headers = "'{}'";
+        }
+
+        KafkaSpan result =
+                KafkaSpanFactory.createProduce(
+                        baseSpan,
+                        NOW,
+                        new StringSerializer(),
+                        new StringSerializer(),
+                        producerMetadata,
+                        producerRecord,
+                        recordMetadata,
+                        new Exception("Failed to produce message"));
+
+        String expectedSpan =
+                "{"
+                        + "\"started\":1716208357697,"
+                        + "\"ended\":1716208358649,"
+                        + "\"id\":\"9746e565-9f1a-4b9c-92f7-9a63337e1193\","
+                        + "\"type\":\"kafka\","
+                        + "\"transactionId\":null,"
+                        + "\"account\":null,"
+                        + "\"region\":null,"
+                        + "\"token\":null,"
+                        + "\"parentId\":null,"
+                        + "\"info\":"
+                        + "   {"
+                        + "      \"tracer\": {\"version\":\"1.0\"},"
+                        + "      \"traceId\":{\"Root\":\"1-2-3\"},"
+                        + "      \"kafkaInfo\":"
+                        + "         {"
+                        + "            \"bootstrapServers\": '[\"bootstrap-servers:9092\"]',"
+                        + "            \"topic\":\"topic\","
+                        + "            \"record\": {\"key\":\"key\",\"value\":\"value\",\"headers\":"
+                        + headers
+                        + "},"
+                        + "            \"response\":{\"errorMessage\": \"Failed to produce message\"}"
+                        + "         },"
+                        + "      \"messageId\":"
+                        + messageId
+                        + ","
+                        + "      \"messageIds\":null,"
+                        + "      \"resourceName\":null,"
+                        + "      \"targetArn\":null"
+                        + "}"
+                        + "}";
+        JSONAssert.assertEquals(
+                expectedSpan,
+                JsonUtils.getObjectAsJsonString(result),
+                new CustomComparator(
+                        JSONCompareMode.LENIENT,
+                        new Customization("info.tracer.version", (o1, o2) -> o1 != null),
+                        new Customization("id", (o1, o2) -> o1 != null),
+                        new Customization("started", (o1, o2) -> o1 != null),
+                        new Customization("ended", (o1, o2) -> o1 != null)));
+    }
+
+    @Test
+    public void testCreateConsumeSpan() throws Exception {
+        ConsumerRecords<String, String> consumerRecords =
+                new ConsumerRecords<>(
+                        Collections.singletonMap(
+                                new TopicPartition(TOPIC, PARTITION),
+                                Collections.singletonList(
+                                        new ConsumerRecord<>(
+                                                TOPIC, PARTITION, 0, "key", "value"))));
+        KafkaSpan result =
+                KafkaSpanFactory.createConsume(
+                        baseSpan, NOW, consumer, consumerMetadata, consumerRecords);
+
+        String expectedSpan =
+                "{"
+                        + "\"started\":1716210606909,"
+                        + "\"ended\":1716210608628,"
+                        + "\"id\":\"19abee7e-67a7-4263-882c-3f251163913b\","
+                        + "\"type\":\"kafka\","
+                        + "\"transactionId\":null,"
+                        + "\"account\":null,"
+                        + "\"region\":null,"
+                        + "\"token\":null,"
+                        + "\"parentId\":null,"
+                        + "\"info\":{"
+                        + "   \"tracer\":{\"version\":\"1.0\"},"
+                        + "   \"traceId\":{\"Root\":\"1-2-3\"},"
+                        + "   \"kafkaInfo\":"
+                        + "      {"
+                        + "         \"bootstrapServers\":[\"bootstrap-servers:9092\"],"
+                        + "         \"consumerGroupId\":\"groupId\","
+                        + "         \"recordsCount\":1,"
+                        + "         \"topics\":[\"topic\"],"
+                        + "         \"records\":[{\"topic\":\"topic\",\"partition\":1,\"offset\":0,\"key\":\"key\",\"value\":\"value\",\"headers\":'{}'}]"
+                        + "      },"
+                        + "   \"messageId\":null,"
+                        + "   \"messageIds\":[],"
+                        + "   \"resourceName\":null,"
+                        + "   \"targetArn\":null"
+                        + "}"
+                        + "}";
+        JSONAssert.assertEquals(
+                expectedSpan,
+                JsonUtils.getObjectAsJsonString(result),
+                new CustomComparator(
+                        JSONCompareMode.LENIENT,
+                        new Customization("info.tracer.version", (o1, o2) -> o1 != null),
+                        new Customization("id", (o1, o2) -> o1 != null),
+                        new Customization("started", (o1, o2) -> o1 != null),
+                        new Customization("ended", (o1, o2) -> o1 != null)));
+    }
+}
diff --git a/src/test/java/io/lumigo/models/KafkaSpanTest.java b/src/test/java/io/lumigo/models/KafkaSpanTest.java
new file mode 100644
index 0000000..e90a258
--- /dev/null
+++ b/src/test/java/io/lumigo/models/KafkaSpanTest.java
@@ -0,0 +1,106 @@
+package io.lumigo.models;
+
+import io.lumigo.core.utils.SecretScrubber;
+import java.util.Collections;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import org.junit.jupiter.api.Test;
+
+public class KafkaSpanTest {
+
+    @Test
+    public void testScrub() throws Exception {
+        KafkaSpan kafkaSpan =
+                KafkaSpan.builder()
+                        .info(
+                                KafkaSpan.Info.builder()
+                                        .kafkaInfo(
+                                                KafkaSpan.KafkaProducerInfo.builder()
+                                                        .bootstrapServers(
+                                                                StringUtils.repeat(
+                                                                        "bootstrapServer,", 10))
+                                                        .record(
+                                                                KafkaSpan.KafkaProducerRecord
+                                                                        .builder()
+                                                                        .key("{\"key\":\"value\"}")
+                                                                        .value(
+                                                                                StringUtils.repeat(
+                                                                                        "value,",
+                                                                                        10))
+                                                                        .headers(
+                                                                                StringUtils.repeat(
+                                                                                        "headers,",
+                                                                                        10))
+                                                                        .build())
+                                                        .build())
+                                        .build())
+                        .build();
+
+        KafkaSpan result =
+                (KafkaSpan)
+                        kafkaSpan.scrub(
+                                new SecretScrubber(
+                                        Collections.singletonMap(
+                                                "LUMIGO_SECRET_MASKING_REGEX", ".*key.*")));
+        Assert.assertEquals(
+                "{\"key\":\"****\"}",
+                ((KafkaSpan.KafkaProducerInfo) result.getInfo().getKafkaInfo())
+                        .getRecord()
+                        .getKey());
+    }
+
+    @Test
+    public void testReduceSizeProduce() throws Exception {
+
+        KafkaSpan kafkaSpan =
+                KafkaSpan.builder()
+                        .info(
+                                KafkaSpan.Info.builder()
+                                        .kafkaInfo(
+                                                KafkaSpan.KafkaProducerInfo.builder()
+                                                        .bootstrapServers(
+                                                                StringUtils.repeat(
+                                                                        "bootstrapServer,", 10))
+                                                        .record(
+                                                                KafkaSpan.KafkaProducerRecord
+                                                                        .builder()
+                                                                        .key(
+                                                                                StringUtils.repeat(
+                                                                                        "key,", 10))
+                                                                        .value(
+                                                                                StringUtils.repeat(
+                                                                                        "value,",
+                                                                                        10))
+                                                                        .headers(
+                                                                                StringUtils.repeat(
+                                                                                        "headers,",
+                                                                                        10))
+                                                                        .build())
+                                                        .build())
+                                        .build())
+                        .build();
+
+        KafkaSpan result = (KafkaSpan) kafkaSpan.reduceSize(10);
+
+        assert ((KafkaSpan.KafkaProducerInfo) result.getInfo().getKafkaInfo())
+                        .getBootstrapServers()
+                        .length()
+                == 10;
+        assert ((KafkaSpan.KafkaProducerInfo) result.getInfo().getKafkaInfo())
+                        .getRecord()
+                        .getKey()
+                        .length()
+                == 10;
+        assert ((KafkaSpan.KafkaProducerInfo) result.getInfo().getKafkaInfo())
+                        .getRecord()
+                        .getValue()
+                        .length()
+                == 10;
+        assert ((KafkaSpan.KafkaProducerInfo) result.getInfo().getKafkaInfo())
+                        .getRecord()
+                        .getHeaders()
+                        .length()
+                == 10;
+    }
+}