
package com.newrelic.labs;

import java.util.Map;

public class LogEntry {
	private final String message;
	private final String applicationName;
	private final String name;
	private final String logtype;
	private final long timestamp;


	public LogEntry(String message, String applicationName, String name, String logtype, long timestamp,
			Map<String, Object> custom, boolean mergeCustomFields) {
		this.message = message;
		this.applicationName = applicationName;
		this.name = name;
		this.logtype = logtype;
		this.timestamp = timestamp;
		// Note: custom and mergeCustomFields are not stored on the entry; the
		// forwarder applies custom fields at flush time instead.
	}

	public String getMessage() {
		return message;
	}

	public String getApplicationName() {
		return applicationName;
	}

	public String getName() {
		return name;
	}

	public String getLogType() {
		return logtype;
	}

	public long getTimestamp() {
		return timestamp;
	}
}

package com.newrelic.labs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.zip.GZIPOutputStream;

import com.fasterxml.jackson.databind.ObjectMapper;

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class LogForwarder {
	private final BlockingQueue<LogEntry> logQueue;
	private final String apiKey;
	private final String apiURL;
	private final OkHttpClient client = new OkHttpClient();
	private final ObjectMapper objectMapper = new ObjectMapper();
	private final long maxMessageSize;

	public LogForwarder(String apiKey, String apiURL, long maxMessageSize, BlockingQueue<LogEntry> logQueue) {
		this.apiKey = apiKey;
		this.apiURL = apiURL;
		this.maxMessageSize = maxMessageSize;
		this.logQueue = logQueue;
	}

	public boolean isInitialized() {
		return apiKey != null && apiURL != null;
	}

	// Serializes the batch to JSON, gzips it, and either sends it directly or
	// splits it into sub-batches when the compressed payload exceeds maxMessageSize.
	public void flush(List<LogEntry> logEntries, boolean mergeCustomFields, Map<String, Object> customFields) {
		InetAddress localhost = null;
		try {
			localhost = InetAddress.getLocalHost();
		} catch (UnknownHostException e) {
			e.printStackTrace();
		}

		String hostname = localhost != null ? localhost.getHostName() : "unknown";

		try {
			List<Map<String, Object>> logEvents = new ArrayList<>();
			for (LogEntry entry : logEntries) {
				Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
				logEvent.put("hostname", hostname);
				logEvent.put("logtype", entry.getLogType());
				logEvent.put("timestamp", entry.getTimestamp());
				logEvent.put("applicationName", entry.getApplicationName());
				logEvent.put("name", entry.getName());
				logEvent.put("source", "NRBatchingAppender");

				// Add custom fields
				if (customFields != null) {
					if (mergeCustomFields) {
						// Traverse all keys and add each field separately
						for (Map.Entry<String, Object> field : customFields.entrySet()) {
							logEvent.put(field.getKey(), field.getValue());
						}
					} else {
						// Directly add the custom fields as a single entry
						logEvent.put("custom", customFields);
					}
				}

				logEvents.add(logEvent);
			}

			String jsonPayload = objectMapper.writeValueAsString(logEvents);
			byte[] compressedPayload = gzipCompress(jsonPayload);

			if (compressedPayload.length > maxMessageSize) {
				splitAndSendLogs(logEntries, mergeCustomFields, customFields);
			} else {
				sendLogs(logEvents);
			}
		} catch (IOException e) {
			System.err.println("Error during log forwarding: " + e.getMessage());
		}
	}

	// Splits an oversized batch into sub-batches whose estimated compressed size
	// stays under maxMessageSize, sending each sub-batch as it fills.
	private void splitAndSendLogs(List<LogEntry> logEntries, boolean mergeCustomFields,
			Map<String, Object> customFields) throws IOException {
		List<LogEntry> subBatch = new ArrayList<>();
		int currentSize = 0;
		for (LogEntry entry : logEntries) {
			Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
			logEvent.put("hostname", InetAddress.getLocalHost().getHostName());
			logEvent.put("logtype", entry.getLogType());
			logEvent.put("timestamp", entry.getTimestamp());
			logEvent.put("applicationName", entry.getApplicationName());
			logEvent.put("name", entry.getName());
			logEvent.put("source", "NRBatchingAppender");

			// Add custom fields
			if (customFields != null) {
				if (mergeCustomFields) {
					// Traverse all keys and add each field separately
					for (Map.Entry<String, Object> field : customFields.entrySet()) {
						logEvent.put(field.getKey(), field.getValue());
					}
				} else {
					// Directly add the custom fields as a single entry
					logEvent.put("custom", customFields);
				}
			}

			String entryJson = objectMapper.writeValueAsString(logEvent);
			// Summing per-entry gzip sizes only approximates the batch's compressed
			// size, but individually compressed entries are larger than the batched
			// stream, so the estimate errs on the safe side.
			int entrySize = gzipCompress(entryJson).length;
			if (currentSize + entrySize > maxMessageSize) {
				sendLogs(convertToLogEvents(subBatch, mergeCustomFields, customFields));
				subBatch.clear();
				currentSize = 0;
			}
			subBatch.add(entry);
			currentSize += entrySize;
		}
		if (!subBatch.isEmpty()) {
			sendLogs(convertToLogEvents(subBatch, mergeCustomFields, customFields));
		}
	}

	private List<Map<String, Object>> convertToLogEvents(List<LogEntry> logEntries, boolean mergeCustomFields,
			Map<String, Object> customFields) {
		List<Map<String, Object>> logEvents = new ArrayList<>();
		try {
			InetAddress localhost = InetAddress.getLocalHost();
			String hostname = localhost.getHostName();

			for (LogEntry entry : logEntries) {
				Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
				logEvent.put("hostname", hostname);
				logEvent.put("logtype", entry.getLogType());
				logEvent.put("timestamp", entry.getTimestamp());
				logEvent.put("applicationName", entry.getApplicationName());
				logEvent.put("name", entry.getName());
				logEvent.put("source", "NRBatchingAppender");

				// Add custom fields
				if (customFields != null) {
					if (mergeCustomFields) {
						// Traverse all keys and add each field separately
						for (Map.Entry<String, Object> field : customFields.entrySet()) {
							logEvent.put(field.getKey(), field.getValue());
						}
					} else {
						// Directly add the custom fields as a single entry
						logEvent.put("custom", customFields);
					}
				}

				logEvents.add(logEvent);
			}
		} catch (UnknownHostException e) {
			System.err.println("Error resolving local host: " + e.getMessage());
		}
		return logEvents;
	}

	// Gzips the JSON payload and POSTs it to New Relic; on failure (HTTP error
	// or IOException) the events are requeued for a later attempt.
	private void sendLogs(List<Map<String, Object>> logEvents) throws IOException {
		String jsonPayload = objectMapper.writeValueAsString(logEvents);
		byte[] compressedPayload = gzipCompress(jsonPayload);

		MediaType mediaType = MediaType.parse("application/json");

		RequestBody requestBody = RequestBody.create(compressedPayload, mediaType);
		Request request = new Request.Builder().url(apiURL).post(requestBody).addHeader("X-License-Key", apiKey)
				.addHeader("Content-Type", "application/json").addHeader("Content-Encoding", "gzip").build();

		try (Response response = client.newCall(request).execute()) {
			if (!response.isSuccessful()) {
				System.err.println("Failed to send logs to New Relic: " + response.code() + " - " + response.message());
				System.err.println("Response body: " + response.body().string());
				requeueLogs(logEvents); // Requeue logs if the response is not successful
			} else {
				// Success logging is intentionally disabled: printing from inside the
				// appender can itself be captured by the logging framework and create
				// an infinite feedback loop.
			}
		} catch (IOException e) {
			System.err.println("Error during log forwarding: " + e.getMessage());
			requeueLogs(logEvents); // Requeue logs if an exception occurs
		}
	}

	private void requeueLogs(List<Map<String, Object>> logEvents) {
		for (Map<String, Object> logEvent : logEvents) {
			try {
				// Note: converting the event map back assumes Jackson can construct a
				// LogEntry; since LogEntry has no default constructor, this needs
				// constructor binding support (e.g. the parameter-names module).
				LogEntry logEntry = objectMapper.convertValue(logEvent, LogEntry.class);
				logQueue.put(logEntry); // Requeue the log entry
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				System.err.println("Failed to requeue log entry: " + logEvent);
			}
		}
	}

	private byte[] gzipCompress(String input) throws IOException {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) {
			// JSON payloads are UTF-8 by convention; avoid the platform default charset
			gzipOS.write(input.getBytes(StandardCharsets.UTF_8));
		}
		return bos.toByteArray();
	}

	public void close(boolean mergeCustomFields, Map<String, Object> customFields) {
		List<LogEntry> remainingLogs = new ArrayList<>();
		logQueue.drainTo(remainingLogs);
		if (!remainingLogs.isEmpty()) {
			System.out.println("Flushing remaining " + remainingLogs.size() + " log events to New Relic...");
			flush(remainingLogs, mergeCustomFields, customFields);
		}
	}
}

package com.newrelic.labs;

import java.util.HashMap;
import java.util.Map;

@SuppressWarnings("serial")
public class LowercaseKeyMap extends HashMap<String, Object> {
	@Override
	public Object put(String key, Object value) {
		return super.put(key.toLowerCase(), value);
	}

	@Override
	public void putAll(Map<? extends String, ? extends Object> m) {
		for (Map.Entry<? extends String, ? extends Object> entry : m.entrySet()) {
			this.put(entry.getKey().toLowerCase(), entry.getValue());
		}
	}
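
	// Example: put("LogType", "muleLog") stores the key as "logtype", so event
	// attribute names are normalized regardless of how callers case them.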
}

package com.newrelic.labs;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.status.StatusLogger;

@Plugin(name = "NewRelicBatchingAppender", category = "Core", elementType = "appender", printObject = true)
public class NewRelicBatchingAppender extends AbstractAppender {

	private final BlockingQueue<LogEntry> queue;

	private final String apiKey;
	private final String apiUrl;
	private final String applicationName;
	private final String logType;
	private final boolean mergeCustomFields;
	private final String name;
	private final LogForwarder logForwarder;
	private static final Logger logger = StatusLogger.getLogger();

	private final int batchSize;
	private final long maxMessageSize;
	private final long flushInterval;
	private final Map<String, Object> customFields;

	private static final int DEFAULT_BATCH_SIZE = 5000;
	private static final long DEFAULT_MAX_MESSAGE_SIZE = 1048576; // 1 MB
	private static final long DEFAULT_FLUSH_INTERVAL = 120000; // 2 minutes
	private static final String LOG_TYPE = "muleLog"; // default logtype
	private static final boolean MERGE_CUSTOM_FIELDS = false; // by default, custom fields are nested under a single
																// "custom" block, e.g. custom.attribute1

	protected NewRelicBatchingAppender(String name, Filter filter, Layout<? extends Serializable> layout,
			final boolean ignoreExceptions, String apiKey, String apiUrl, String applicationName, Integer batchSize,
			Long maxMessageSize, Long flushInterval, String logType, String customFields, Boolean mergeCustomFields) {
		super(name, filter, layout, ignoreExceptions, Property.EMPTY_ARRAY);
		this.queue = new LinkedBlockingQueue<>();
		this.apiKey = apiKey;
		this.apiUrl = apiUrl;
		this.applicationName = applicationName;
		this.name = name;
		this.batchSize = batchSize != null ? batchSize : DEFAULT_BATCH_SIZE;
		this.maxMessageSize = maxMessageSize != null ? maxMessageSize : DEFAULT_MAX_MESSAGE_SIZE;
		this.flushInterval = flushInterval != null ? flushInterval : DEFAULT_FLUSH_INTERVAL;
		this.logType = ((logType != null) && (logType.length() > 0)) ? logType : LOG_TYPE;
		this.customFields = parseCustomFields(customFields);
		this.mergeCustomFields = mergeCustomFields != null ? mergeCustomFields : MERGE_CUSTOM_FIELDS;
		this.logForwarder = new LogForwarder(apiKey, apiUrl, this.maxMessageSize, this.queue);
		startFlushingTask();
	}

	private Map<String, Object> parseCustomFields(String customFields) {
		Map<String, Object> custom = new HashMap<>();
		if (customFields != null && !customFields.isEmpty()) {
			String[] pairs = customFields.split(",");
			for (String pair : pairs) {
				String[] keyValue = pair.split("=");
				if (keyValue.length == 2) {
					custom.put(keyValue[0], keyValue[1]);
				}
			}
		}
		return custom;
	}
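
	// Example: customFields="env=prod,team=integration" parses to
	// {env=prod, team=integration}; pairs without exactly one '=' are skipped.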

	@PluginFactory
	public static NewRelicBatchingAppender createAppender(@PluginAttribute("name") String name,
			@PluginElement("Layout") Layout layout,
			@PluginElement("Filter") final Filter filter, @PluginAttribute("apiKey") String apiKey,
			@PluginAttribute("apiUrl") String apiUrl, @PluginAttribute("applicationName") String applicationName,
			@PluginAttribute(value = "batchSize") Integer batchSize,
			@PluginAttribute(value = "maxMessageSize") Long maxMessageSize, @PluginAttribute("logType") String logType,
			@PluginAttribute(value = "flushInterval") Long flushInterval,
			@PluginAttribute("customFields") String customFields,
			@PluginAttribute(value = "mergeCustomFields") Boolean mergeCustomFields) {

		if (name == null) {
			logger.error("No name provided for NewRelicBatchingAppender");
			return null;
		}

		if (layout == null) {
			layout = PatternLayout.createDefaultLayout();
		}

		if (apiKey == null || apiUrl == null || applicationName == null) {
			logger.error("API key, API URL, and application name must be provided for NewRelicBatchingAppender");
			return null;
		}

		return new NewRelicBatchingAppender(name, filter, layout, true, apiKey, apiUrl, applicationName, batchSize,
				maxMessageSize, flushInterval, logType, customFields, mergeCustomFields);
	}

	@Override
	public void append(LogEvent event) {
		if (!checkEntryConditions()) {
			logger.warn("Appender not initialized. Dropping log entry");
			return;
		}

		String message = new String(getLayout().toByteArray(event));
		String loggerName = event.getLoggerName();
		long timestamp = event.getTimeMillis(); // Capture the log creation timestamp

		// Extract MuleAppName from the message
		String muleAppName = extractMuleAppName(message);

		logger.debug("Queueing message for New Relic: " + message);

		try {
			// Extract custom fields from the event context
			Map<String, Object> custom = new HashMap<>(extractCustomFields(event));
			// Add static custom fields from configuration without a prefix
			for (Entry<String, Object> entry : this.customFields.entrySet()) {
				custom.putIfAbsent(entry.getKey(), entry.getValue());
			}
			// Directly add to the queue
			queue.add(
					new LogEntry(message, applicationName, muleAppName, logType, timestamp, custom, mergeCustomFields));
			// Check if the batch size is reached and flush immediately
			if (queue.size() >= batchSize) {
				flushQueue();
			}
		} catch (Exception e) {
			logger.error("Unable to insert log entry into log queue. ", e);
		}
	}

	private void flushQueue() {
		List<LogEntry> batch = new ArrayList<>();
		queue.drainTo(batch, batchSize);
		if (!batch.isEmpty()) {
			logger.debug("Flushing {} log entries to New Relic", batch.size());
			logForwarder.flush(batch, mergeCustomFields, customFields);
		}
	}

	private Map<String, Object> extractCustomFields(LogEvent event) {
		Map<String, Object> custom = new HashMap<>();
		event.getContextData().forEach(custom::put);
		return custom;
	}

	private String extractMuleAppName(String message) {
		Pattern pattern = Pattern.compile("\\[.*?\\]\\..*?\\[([^\\]]+)\\]");
		Matcher matcher = pattern.matcher(message);
		if (matcher.find()) {
			return matcher.group(1);
		}
		return "generic";
	}
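
	// Example (hypothetical Mule log line): for a rendered message like
	// "[processor.logger].stage[my-mule-app] Started", the pattern above captures
	// "my-mule-app"; messages without that bracket shape fall back to "generic".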

	private boolean checkEntryConditions() {
		boolean initialized = logForwarder != null && logForwarder.isInitialized();
		logger.debug("Check entry conditions: " + initialized);
		return initialized;
	}

	private void startFlushingTask() {
		Runnable flushTask = new Runnable() {
			@Override
			public void run() {
				while (true) {
					try {
						logger.debug("Flushing task running...");
						List<LogEntry> batch = new ArrayList<>();
						queue.drainTo(batch, batchSize);
						if (!batch.isEmpty()) {
							logger.debug("Flushing {} log entries to New Relic", batch.size());
							logForwarder.flush(batch, mergeCustomFields, customFields);
						}
						Thread.sleep(flushInterval);
					} catch (InterruptedException e) {
						Thread.currentThread().interrupt();
						logger.error("Flushing task interrupted, stopping flush task", e);
						return; // without this, sleep() would throw again immediately and busy-loop
					}
				}
			}
		};

		Thread flushThread = new Thread(flushTask);
		flushThread.setDaemon(true);
		flushThread.start();

		// Log the configuration settings in use
		logger.info(
				"NewRelicBatchingAppender initialized with settings: batchSize={}, maxMessageSize={}, flushInterval={}",
				batchSize, maxMessageSize, flushInterval);
	}

	@Override
	public boolean stop(final long timeout, final TimeUnit timeUnit) {
		logger.debug("Stopping NewRelicBatchingAppender {}", getName());
		setStopping();
		final boolean stopped = super.stop(timeout, timeUnit, false);
		try {
			logForwarder.close(mergeCustomFields, customFields);
		} catch (Exception e) {
			logger.error("Unable to close appender", e);
		}
		setStopped();
		logger.debug("NewRelicBatchingAppender {} has been stopped", getName());
		return stopped;
	}
}
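
For reference, a minimal log4j2.xml sketch wiring up this appender (the key and
endpoint values are placeholders; attribute names follow the @PluginAttribute
declarations above, and omitted attributes fall back to the class defaults):

<Configuration packages="com.newrelic.labs">
	<Appenders>
		<NewRelicBatchingAppender name="NewRelic"
				apiKey="YOUR_LICENSE_KEY"
				apiUrl="https://log-api.newrelic.com/log/v1"
				applicationName="my-app"
				batchSize="5000"
				flushInterval="120000"
				logType="muleLog"
				customFields="env=prod,team=integration"
				mergeCustomFields="false">
			<PatternLayout pattern="%d [%t] %-5p %c - %m%n" />
		</NewRelicBatchingAppender>
	</Appenders>
	<Loggers>
		<Root level="info">
			<AppenderRef ref="NewRelic" />
		</Root>
	</Loggers>
</Configuration>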



