Improve logging for exceptions (#4942)
Improve logging for exceptions

Signed-off-by: Srikanth Govindarajan <srigovs@amazon.com>
srikanthjg authored Sep 18, 2024
1 parent 2919e99 commit 87c560a
Showing 21 changed files with 200 additions and 70 deletions.
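
For context, the pattern applied throughout this diff is to register a NOISY marker alongside the existing EVENT and SENSITIVE markers, pass it directly to LOG.error(...) at simple call sites, and switch to SLF4J's fluent API where a statement needs both the EVENT and NOISY markers plus a cause. The following is a minimal, self-contained sketch of that pattern; the class name, sample payload, and main method are invented for illustration and are not part of the commit.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class NoisyLoggingSketch {
    // Markers named as in DataPrepperMarkers; this standalone class is illustrative only.
    private static final Marker EVENT = MarkerFactory.getMarker("EVENT");
    private static final Marker NOISY = MarkerFactory.getMarker("NOISY");

    private static final Logger LOG = LoggerFactory.getLogger(NoisyLoggingSketch.class);

    public static void main(String[] args) {
        final String eventJson = "{\"message\":\"hello\"}"; // hypothetical event payload

        try {
            throw new IllegalStateException("simulated processing failure");
        } catch (final Exception e) {
            // Simple form: a single marker passed directly to the error(...) overload.
            LOG.error(NOISY, "An exception occurred while attempting to process Event: ", e);

            // Fluent form (SLF4J 2.x): attach multiple markers, an argument, and the cause.
            LOG.atError()
                    .addMarker(EVENT)
                    .addMarker(NOISY)
                    .setMessage("Unable to process Event data: {}")
                    .addArgument(eventJson)
                    .setCause(e)
                    .log();
        }
    }
}

A marker such as NOISY can then be used by the logging backend (for example, a Logback marker filter) to route or suppress these high-volume exception logs without changing the plugin code.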
@@ -6,6 +6,7 @@
public final class DataPrepperMarkers {
public static final Marker EVENT = MarkerFactory.getMarker("EVENT");
public static final Marker SENSITIVE = MarkerFactory.getMarker("SENSITIVE");
public static final Marker NOISY = MarkerFactory.getMarker("NOISY");

static {
EVENT.add(SENSITIVE);
@@ -8,6 +8,7 @@
import com.fasterxml.jackson.annotation.JsonClassDescription;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
import org.opensearch.dataprepper.model.processor.Processor;
@@ -34,7 +35,7 @@ public Collection<Record<?>> execute(final Collection<Record<?>> records) {
try {
Thread.sleep(delayDuration.toMillis());
} catch (final InterruptedException ex) {
LOG.error("Interrupted during delay processor", ex);
LOG.error(NOISY, "Interrupted during delay processor", ex);
}
return records;
}
@@ -8,16 +8,18 @@
import com.fasterxml.jackson.annotation.JsonClassDescription;
import com.fasterxml.jackson.annotation.JsonPropertyDescription;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
import org.opensearch.dataprepper.model.configuration.PluginSetting;
import org.opensearch.dataprepper.model.event.Event;
import org.opensearch.dataprepper.model.event.JacksonEvent;
import org.opensearch.dataprepper.model.processor.Processor;
import org.opensearch.dataprepper.model.record.Record;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -26,8 +28,6 @@
import java.util.Map;
import java.util.stream.Collectors;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;

/**
* A simple String implementation of {@link Processor} which generates new Records with uppercase or lowercase content. The current
* simpler implementation does not handle errors (if any).
@@ -84,7 +84,13 @@ public Collection<Record<Event>> execute(final Collection<Record<Event>> records
.build();
modifiedRecords.add(new Record<>(newRecordEvent));
} catch (JsonProcessingException e) {
LOG.error(EVENT, "Unable to process Event data: {}", eventJson, e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("Unable to process Event data: {}")
.addArgument(eventJson)
.setCause(e)
.log();
}
}
return modifiedRecords;
@@ -5,7 +5,14 @@

package org.opensearch.dataprepper.plugins.processor.csv;

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvParser;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import io.micrometer.core.instrument.Counter;
import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -14,11 +21,6 @@
import org.opensearch.dataprepper.model.processor.AbstractProcessor;
import org.opensearch.dataprepper.model.processor.Processor;
import org.opensearch.dataprepper.model.record.Record;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvParser;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import io.micrometer.core.instrument.Counter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -28,8 +30,6 @@
import java.util.List;
import java.util.Objects;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;

/**
* Processor to parse CSV data in Events.
*
@@ -107,7 +107,13 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor
}
} catch (final IOException e) {
csvInvalidEventsCounter.increment();
LOG.error(EVENT, "An exception occurred while reading event [{}]", event, e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("An exception occurred while reading event [{}]")
.addArgument(event)
.setCause(e)
.log();
}
}
return records;
@@ -8,6 +8,7 @@
import io.micrometer.core.instrument.Counter;
import org.apache.commons.lang3.tuple.Pair;
import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -102,7 +103,7 @@ public Collection<Record<Event>> doExecute(Collection<Record<Event>> records) {
record.getData().put(dateProcessorConfig.getDestination(), zonedDateTime);
}
} catch (final Exception e) {
LOG.error("An exception occurred while attempting to process Event: ", e);
LOG.error(NOISY, "An exception occurred while attempting to process Event: ", e);
}

}
@@ -6,6 +6,8 @@
package org.opensearch.dataprepper.plugins.processor.dissect;

import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -26,8 +28,6 @@
import java.util.Map;
import java.util.Objects;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;


@DataPrepperPlugin(name = "dissect", pluginType = Processor.class, pluginConfigurationType = DissectProcessorConfig.class)
public class DissectProcessor extends AbstractProcessor<Record<Event>, Record<Event>> {
@@ -72,7 +72,13 @@ public Collection<Record<Event>> doExecute(Collection<Record<Event>> records) {
}
}
} catch (Exception ex){
LOG.error(EVENT, "Error dissecting the event [{}] ", record.getData(), ex);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("Error dissecting the event [{}]")
.addArgument(record.getData())
.setCause(ex)
.log();
}
}
return records;
@@ -7,6 +7,7 @@

import com.github.wnameless.json.flattener.JsonFlattener;
import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -87,7 +88,7 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor

updateEvent(recordEvent, flattenedJson);
} catch (Exception e) {
LOG.error("Fail to perform flatten operation", e);
LOG.error(NOISY, "Fail to perform flatten operation", e);
recordEvent.getMetadata().addTags(config.getTagsOnFailure());
} finally {
// Add temporarily deleted fields back
@@ -8,6 +8,7 @@
import io.micrometer.core.instrument.Counter;
import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import org.opensearch.dataprepper.logging.DataPrepperMarkers;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -107,7 +108,7 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor
try (final GeoIPDatabaseReader geoIPDatabaseReader = BatchGeoIPDatabaseReader.decorate(geoIPProcessorService.getGeoIPDatabaseReader())) {
processRecords(records, geoIPDatabaseReader);
} catch (final Exception e) {
LOG.error("Encountered exception in geoip processor.", e);
LOG.error(NOISY, "Encountered exception in geoip processor.", e);
}
return records;
}
@@ -12,6 +12,8 @@
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.Timer;
import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -21,6 +23,8 @@
import org.opensearch.dataprepper.model.processor.AbstractProcessor;
import org.opensearch.dataprepper.model.processor.Processor;
import org.opensearch.dataprepper.model.record.Record;
import static org.opensearch.dataprepper.plugins.processor.grok.GrokProcessorConfig.TOTAL_PATTERNS_ATTEMPTED_METADATA_KEY;
import static org.opensearch.dataprepper.plugins.processor.grok.GrokProcessorConfig.TOTAL_TIME_SPENT_IN_GROK_METADATA_KEY;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -54,10 +58,6 @@
import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.plugins.processor.grok.GrokProcessorConfig.TOTAL_PATTERNS_ATTEMPTED_METADATA_KEY;
import static org.opensearch.dataprepper.plugins.processor.grok.GrokProcessorConfig.TOTAL_TIME_SPENT_IN_GROK_METADATA_KEY;


@SingleThread
@DataPrepperPlugin(name = "grok", pluginType = Processor.class, pluginConfigurationType = GrokProcessorConfig.class)
@@ -153,11 +153,25 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor

} catch (final TimeoutException e) {
event.getMetadata().addTags(tagsOnTimeout);
LOG.error(EVENT, "Matching on record [{}] took longer than [{}] and timed out", record.getData(), grokProcessorConfig.getTimeoutMillis());
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("Matching on record [{}] took longer than [{}] and timed out")
.addArgument(record.getData())
.addArgument(grokProcessorConfig.getTimeoutMillis())
.log();

grokProcessingTimeoutsCounter.increment();
} catch (final ExecutionException | InterruptedException | RuntimeException e) {
event.getMetadata().addTags(tagsOnMatchFailure);
LOG.error(EVENT, "An exception occurred when matching record [{}]", record.getData(), e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("An exception occurred when matching record [{}]")
.addArgument(record.getData())
.setCause(e)
.log();

grokProcessingErrorsCounter.increment();
}

@@ -6,6 +6,8 @@
package org.opensearch.dataprepper.plugins.processor.mutateevent;

import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -26,8 +28,6 @@
import java.util.function.Consumer;
import java.util.function.Supplier;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;

@DataPrepperPlugin(name = "add_entries", pluginType = Processor.class, pluginConfigurationType = AddEntryProcessorConfig.class)
public class AddEntryProcessor extends AbstractProcessor<Record<Event>, Record<Event>> {
private static final Logger LOG = LoggerFactory.getLogger(AddEntryProcessor.class);
@@ -92,12 +92,28 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor
}
}
} catch (Exception e) {
LOG.error(EVENT, "Error adding entry to record [{}] with key [{}], metadataKey [{}], value_expression [{}] format [{}], value [{}]",
recordEvent, entry.getKey(), entry.getMetadataKey(), entry.getValueExpression(), entry.getFormat(), entry.getValue(), e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("Error adding entry to record [{}] with key [{}], metadataKey [{}], value_expression [{}] format [{}], value [{}]")
.addArgument(recordEvent)
.addArgument(entry.getKey())
.addArgument(entry.getMetadataKey())
.addArgument(entry.getValueExpression())
.addArgument(entry.getFormat())
.addArgument(entry.getValue())
.setCause(e)
.log();
}
}
} catch(final Exception e){
LOG.error(EVENT, "There was an exception while processing Event [{}]", recordEvent, e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("There was an exception while processing Event [{}]")
.addArgument(recordEvent)
.setCause(e)
.log();
}
}

@@ -6,6 +6,8 @@
package org.opensearch.dataprepper.plugins.processor.mutateevent;

import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -24,8 +26,6 @@
import java.util.List;
import java.util.Objects;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;

@DataPrepperPlugin(name = "convert_entry_type", pluginType = Processor.class, pluginConfigurationType = ConvertEntryTypeProcessorConfig.class)
public class ConvertEntryTypeProcessor extends AbstractProcessor<Record<Event>, Record<Event>> {
private static final Logger LOG = LoggerFactory.getLogger(ConvertEntryTypeProcessor.class);
@@ -91,7 +91,13 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor
}
}
} catch (final Exception e) {
LOG.error(EVENT, "There was an exception while processing Event [{}]", recordEvent, e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("There was an exception while processing Event [{}]")
.addArgument(recordEvent)
.setCause(e)
.log();
recordEvent.getMetadata().addTags(tagsOnFailure);
}
}
@@ -6,6 +6,8 @@
package org.opensearch.dataprepper.plugins.processor.mutateevent;

import org.opensearch.dataprepper.expression.ExpressionEvaluator;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;
import static org.opensearch.dataprepper.logging.DataPrepperMarkers.NOISY;
import org.opensearch.dataprepper.metrics.PluginMetrics;
import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin;
import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor;
@@ -24,8 +26,6 @@
import java.util.Map;
import java.util.Objects;

import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT;

@DataPrepperPlugin(name = "copy_values", pluginType = Processor.class, pluginConfigurationType = CopyValueProcessorConfig.class)
public class CopyValueProcessor extends AbstractProcessor<Record<Event>, Record<Event>> {
private static final Logger LOG = LoggerFactory.getLogger(CopyValueProcessor.class);
@@ -93,7 +93,13 @@ public Collection<Record<Event>> doExecute(final Collection<Record<Event>> recor
}
}
} catch (final Exception e) {
LOG.error(EVENT, "There was an exception while processing Event [{}]", recordEvent, e);
LOG.atError()
.addMarker(EVENT)
.addMarker(NOISY)
.setMessage("There was an exception while processing Event [{}]")
.addArgument(recordEvent)
.setCause(e)
.log();
}
}

(The remaining changed files are not shown.)