
Elasticsearch REST Index Manager Auto Client for CRUD


Elasticsearch 5 REST Java Index Manager Auto Client helps manage the index lifecycle from the client side: by setting a few configuration values it keeps indexes open, closes them, or deletes them, and no third-party tool is required.

The steps below set up automatic index management, save you from managing indexes manually, and take care of each index's life based on the configured times.

Pre-requisite

  • Java 8 or a later version is required.
  • Add the Elasticsearch REST and JSON mapping (Jackson) dependencies to your pom.xml or class path.
  • Index names must end with a date suffix such as IndexName-2017.06.10, for example app1-logs-2017.06.08; if your date format differs, adjust the code below accordingly (see the date-parsing sketch after this list).
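
The index age used throughout this post is derived from that date suffix. Below is a minimal, stand-alone sketch of the calculation, assuming the yyyy.MM.dd suffix mentioned above (the index name here is just an illustration):

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;

public class IndexDateCheck {
	public static void main(String[] args) {
		String indexName = "app1-logs-2017.06.08";
		DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");
		//Take the date suffix after the last '-' and normalize '.' to '-'
		String indexDateStr = indexName.substring(indexName.lastIndexOf("-") + 1).replace('.', '-');
		LocalDate indexDate = LocalDate.parse(indexDateStr, formatter);
		long ageInDays = ChronoUnit.DAYS.between(indexDate, LocalDate.now());
		System.out.println(indexName + " is " + ageInDays + " day(s) old");
	}
}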

We will follow the steps below to create this client and run it automatically:

  • Create the Java Maven project ElasticsearchAutoIndexManager.
  • Add the ElasticSearchIndexManagerClient class to the project.
  • Test it.
  • Create an auto-runnable jar for the project.
  • Create a script file to run the jar.
  • Create a crontab entry to schedule the script and receive email alerts.

Create Java Maven Project ElasticsearchAutoIndexManager

Create a console-based Java Maven project named ElasticsearchAutoIndexManager as in the screenshot below. To learn more about console-based Java Maven projects, follow the link How to create Maven Java Console Project?

Elasticsearch REST Auto Client

Add the below dependencies in the pom.xml file:

<!-- Elasticsearch REST client -->
<dependency>
	<groupId>org.elasticsearch.client</groupId>
	<artifactId>rest</artifactId>
	<version>5.1.2</version>
</dependency>
<!-- Jackson for mapping JSON to Java -->
<dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
</dependency>

Add the ElasticSearchIndexManagerClient class below in the com.facingissuesonit.es package and change the constant fields to match your server details and requirements.

Set INDEX_NO_ACTION_TIME to the number of days during which no action is taken. For example, a value of 2 means an index stays searchable, untouched, for its first two days.

private static final int INDEX_NO_ACTION_TIME = 2; 

Set INDEX_CLOSE_TIME to decide when indexes are closed: a closed index still exists on the Elasticsearch server but is not searchable. For example, a value of 5 means any index older than five days is closed and stays closed until the delete time is reached.

private static final int INDEX_CLOSE_TIME = 5; 

Set INDEX_DELETE_TIME to decide when indexes are deleted. For example, a value of 15 means every index older than 15 days is deleted. With the values above, an index is therefore left alone for its first two days, closed from day five, and removed from day fifteen.

private static final int INDEX_DELETE_TIME = 15;

private static final String ELASTICSEARCH_SERVER = "ServerHost";

private static final int ELASTICSEARCH_SERVER_PORT = 9200;

Note: Set the proxy server and login credential information if required; otherwise comment those lines out.

package com.facingissuesonit.es;

import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ElasticSearchIndexManagerClient {
	private static final int INDEX_NO_ACTION_TIME = 2;
	private static final int INDEX_CLOSE_TIME = 5;
	private static final int INDEX_DELETE_TIME = 15;
	private static final String ELASTICSEARCH_SERVER = "ServerHost";
	private static final int ELASTICSEARCH_SERVER_PORT = 9200;
	public static void main(String[] args) {
		RestClient client;
		String indexName = "", indexDateStr = "";
		LocalDate indexDate = null;
		long days = 0;
		final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");
		final LocalDate todayLocalDate = LocalDate.now();

		try {
			ElasticSearchIndexManagerClient esManager=new ElasticSearchIndexManagerClient();
			//Get Connection from Elasticsearch
			client=esManager.getElasticsearchConnectionClient();
			if(client!=null)
			{
				IndexDetail[] indexList = esManager.getIndexDetailList(client);

				if (indexList != null && indexList.length > 0) {
					for (IndexDetail indexDetail : indexList) {
						indexName = indexDetail.getIndexName();
						System.out.println(indexName);
						indexDateStr = indexName.substring(indexName.lastIndexOf("-") + 1);
						//Below code computes the number of days between the index creation date and the current date
						try {
							indexDate = LocalDate.parse(indexDateStr.replace('.', '-'), formatter);
							days = ChronoUnit.DAYS.between(indexDate, todayLocalDate);
							esManager.performAction(indexDetail, days,client);
						} catch (Exception ex) {
							System.out.println("Index is not having formatted date as required : yyyy.MM.dd :"+indexName);
						}
					}
				}
			}
		} catch (Exception ex) {
			System.out.println("Exception found while index management");
			ex.printStackTrace();
			System.exit(1);
		} finally {
			System.out.println("Index Management successfully completed");
			System.exit(0);
		}
	}
	//Get Elasticsearch Connection
	private RestClient getElasticsearchConnectionClient() {
		final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
		credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("userid", "password"));

		RestClient client = RestClient
				.builder(new HttpHost(ELASTICSEARCH_SERVER,ELASTICSEARCH_SERVER_PORT))
				.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {

					public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
						return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)
								.setProxy(new HttpHost("ProxyHost", 8080)); //HttpHost expects an int port; replace host and port with your proxy details

					}
				}).setMaxRetryTimeoutMillis(60000).build();
		return client;
	}
	//Get list of indexes in the Elasticsearch server
	public IndexDetail[] getIndexDetailList(RestClient client)
	{
		IndexDetail[] indexDetails=null;
		HttpEntity in=null;
		try
		{
		ObjectMapper jacksonObjectMapper = new ObjectMapper();
		Response response = client.performRequest("GET", "/_cat/indices?format=json&pretty", Collections.singletonMap("pretty", "true"));
		in =response.getEntity();
		indexDetails=jacksonObjectMapper.readValue(in.getContent(), IndexDetail[].class);
		System.out.println("Index found :"+indexDetails.length);
		}
		catch(IOException ex)
		{
			ex.printStackTrace();
		}

		return indexDetails;
	}
	//This method decides what action to take based on the index creation date and the configured No Action, Close and Delete times
	private  void performAction(IndexDetail indexDetail, long days,RestClient client) {
		String indexName = indexDetail.getIndexName();
		if (days >= INDEX_NO_ACTION_TIME) {
			if (!(indexDetail.getStatus() != null && indexDetail.getStatus().equalsIgnoreCase("close"))) {
				// Close index condition
				if (days >= INDEX_CLOSE_TIME) {
					System.out.println("Close Index :" + indexName);
					closeIndex(indexName,client);
				}
			}
			// Delete index condition
			if (days >= INDEX_DELETE_TIME) {
				if (!(indexDetail.getStatus() != null && indexDetail.getStatus().equalsIgnoreCase("close"))) {
					System.out.println("Delete Index :" + indexName);
					deleteIndex(indexName,client);
				} else {
					System.out.println("Delete Close Index :" + indexName);
					deleteCloseIndex(indexName,client);
				}
			}
		}
	}

	// Operation on Indexes
		private  void closeIndex(String indexName,RestClient client) {

			flushIndex(indexName,client);
			postDocuments(indexName + "/_close", client);
			System.out.println("Close Index :" + indexName);
		}

		private  void deleteIndex(String indexName,RestClient client) {
			flushIndex(indexName,client);
			deleteDocument(indexName,client);
			System.out.println("Delete Index :" + indexName);
		}

		private  void deleteCloseIndex(String indexName,RestClient client) {
			openIndex(indexName,client);
			flushIndex(indexName,client);
			deleteDocument(indexName, client);
			System.out.println("Delete Close Index :" + indexName);
		}

		private  void openIndex(String indexName,RestClient client) {
			postDocuments(indexName + "/_open", client);
			System.out.println("Open Index :" + indexName);
		}

		private  void flushIndex(String indexName,RestClient client) {
			postDocuments(indexName + "/_flush", client);
			System.out.println("Flush Index :" + indexName);
			try {
				Thread.sleep(3000);
			} catch (InterruptedException ex) {
				ex.printStackTrace();
			}
		}
		//POST action used for creating and updating indexes
		public InputStream postDocuments(String endPoint,RestClient client)
		{
			InputStream in=null;
			Response response=null;
			try
			{
				response = client.performRequest("POST", endPoint, Collections.<String, String>emptyMap());
				in =response.getEntity().getContent();
			}
			catch(IOException ex)
			{
				System.out.println("Exception in post Documents :");
				ex.printStackTrace();
			}
			return in;
		}
		//DELETE action used for deleting an index
		public InputStream deleteDocument(String endPoint,RestClient client)
		{
			InputStream in=null;
			try
			{

		    Response response = client.performRequest("DELETE", endPoint, Collections.singletonMap("pretty", "true"));
			in =response.getEntity().getContent();
			}
			catch(IOException ex)
			{
				System.out.println("Exception in delete Documents :");
				ex.printStackTrace();
			}
			return in;
		}

}
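
The listing above relies on an IndexDetail bean to hold each entry of the /_cat/indices JSON response. A minimal sketch of such a class, assuming only the index name and status fields are needed (Jackson ignores the remaining fields), could look like this:

package com.facingissuesonit.es;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

//Maps one entry of the /_cat/indices?format=json response
@JsonIgnoreProperties(ignoreUnknown = true)
public class IndexDetail {

	@JsonProperty("index")
	private String indexName;

	@JsonProperty("status")
	private String status;

	public String getIndexName() {
		return indexName;
	}
	public void setIndexName(String indexName) {
		this.indexName = indexName;
	}
	public String getStatus() {
		return status;
	}
	public void setStatus(String status) {
		this.status = status;
	}
}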

The above code is fairly straightforward and runs step by step. Let me explain the operations below.

Open: An index in open status is available for searching, and operations such as flush, close and delete can be performed on it while it is open. To open an index, use the below curl command:

curl -XPOST http://<elasticsearch-host>:9200/indexName-2017.06.10/_open

Flush: This operation is required before the close and delete operations so that all in-flight transactions on the index are completed.

curl -XPOST http://<elasticsearch-host>:9200/indexName-2017.06.10/_flush

Close: A closed index still persists on the Elasticsearch server but is not available for searching. To close an index, use the below curl command:

curl -XPOST http://<elasticsearch-host>:9200/indexName-2017.06.10/_close

Delete: The delete operation removes the index from the server completely.

curl -XDELETE http://<elasticsearch-host>:9200/indexName-2017.06.10

Now our code is ready to take care of indexes based on the configured times; run the code above to test it.

The steps below make your index manager code run automatically in a Linux environment.

Create Auto Runnable Jar

Export the ElasticsearchAutoIndexManager project as an auto-runnable jar, selecting ElasticSearchIndexManagerClient as the launch class. To learn about the steps for creating an auto-runnable jar, follow the link How to make and auto run /executable jar with dependencies?

Create Script File to Execute Autorun Jar

Create a script file named IndexManager.sh as below and save it.

#!/bin/bash
~/JAVA/jdk1.8.0_66/bin/java  -jar /opt/app/facingissuesonit/automate/IndexManagerClient.jar
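
Give the script execute permission before scheduling it (run this from the directory where IndexManager.sh is saved):

chmod +x IndexManager.sh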

Create Cron Tab configuration for schedule and receive email alert

Linux provides crontab for scheduling jobs/scripts. We will use crontab to execute the runnable jar through the script file above.

  • Use the command crontab -e to create or edit crontab entries.
  • Add the cron entry shown below so that the IndexManager.sh script runs every night at 1 AM.
  • To send an execution alert with the console logs to you and your team, add your email id as shown below.
  • Save the crontab (press ESC, then :wq).
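
A crontab entry along the following lines runs the script every night at 1 AM and mails the console output; the email address and the script path are illustrative placeholders, so adjust them to your environment:

MAILTO="yourteam@example.com"
0 1 * * * /opt/app/facingissuesonit/automate/IndexManager.sh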

Below are some more examples of crontab expressions:

0 * * * *      : Run at the start of every hour
* * * * *      : Run every minute
30 4 * * *     : Run at 4:30 AM every day
5 10,22 * * *  : Run twice a day, at 10:05 and 22:05
5 0 * * *      : Run at 00:05, just after midnight

Read More

To read more on Elasticsearch REST, sample clients and configurations with examples, follow the links Elasticsearch REST Tutorial and Elasticsearch Issues.

Hope this blog was helpful for you.

Leave your feedback to help enhance this topic and make it more helpful for others.

Java and Kafka Integration


The examples below show a Kafka logs producer and consumer built with the Kafka Java API. The producer sends logs from a file to Topic1 on the Kafka server, and the consumer subscribes to the same logs from Topic1. A Kafka consumer can also subscribe to logs from multiple topics.

Pre-Requisite:

  • The Kafka client works with Java 7 and later versions.
  • Add the Kafka client library to your application class path from the installation directory (or pull it in via Maven, as sketched below).
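
If you prefer Maven to copying jars from the installation directory, a dependency along these lines should work; the version shown is an assumption, so match it to your Kafka broker version:

<dependency>
	<groupId>org.apache.kafka</groupId>
	<artifactId>kafka-clients</artifactId>
	<version>0.10.2.1</version>
</dependency>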

Kafka Logs Producer

The producer example below creates the topic Topic1 on the Kafka server if it does not already exist and pushes all messages from the Test.txt file shown below into the topic.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class KafkaLogsProducer {

	public static void main(String[] args) throws Exception{

	    //Topic Name where logs message events need to publish
	    String topicName = "Topic1";
	    // create instance for properties to access producer configs
	    Properties props = new Properties();

	    //Kafka server host and port
	    props.put("bootstrap.servers", "kafkahost:9092");

	    //Wait for acknowledgement of each request from all in-sync replicas
	    props.put("acks", "all");

	    //Batch size (in bytes) used to batch records per partition
	    props.put("batch.size", 16384);

	    //Total buffer memory (in bytes) available to the producer
	    props.put("buffer.memory", 33553333);

	    //Delay (in ms) to allow more records to accumulate before a batch is sent
	    props.put("linger.ms", 1);

	    //Number of retries if a request fails (0 means no retry)
	    props.put("retries", 0);

	    props.put("key.serializer",
	       "org.apache.kafka.common.serialization.StringSerializer");

	    props.put("value.serializer",
	       "org.apache.kafka.common.serialization.StringSerializer");

	    //Thread.currentThread().setContextClassLoader(null);
	    Producer<String, String> producer = new KafkaProducer<>(props);
	    File in = new File("C:\\Users\\Saurabh\\Desktop\\Test.txt");
	    try (BufferedReader br = new BufferedReader(new FileReader(in))) {
		    String line;
		    while ((line = br.readLine()) != null) {
		    	 producer.send(new ProducerRecord<>(topicName, "message", line));
		    }
		}
	             System.out.println("All Messages sent successfully");
	             producer.close();
	 }
	}

Input File from Directory

C:\Users\Saurabh\Desktop\Test.txt

Hi
This is kafka Producer Test.
Now will check for Response.

Kafka Logs Consumer

The Kafka consumer below reads from Topic1 and prints each message to the console along with its offset value. A consumer can read messages from multiple topics at the same time.

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaLogsConsumer {

	public static void main(String[] args) {
		//Topics from which messages need to be consumed
		 List<String> topicsList=new ArrayList<>();
		 topicsList.add("Topic1");
		 //topicsList.add("Topic2");

		  Properties props = new Properties();
	      props.put("bootstrap.servers", "kafkahost:9092");
	      props.put("group.id", "test");
	      props.put("enable.auto.commit", "true");
	      props.put("auto.commit.interval.ms", "1000");
	      props.put("session.timeout.ms", "30000");
	      props.put("key.deserializer",
	         "org.apache.kafka.common.serialization.StringDeserializer");
	      props.put("value.deserializer",
	         "org.apache.kafka.common.serialization.StringDeserializer");
	      KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

	      //Kafka consumer subscribe to all these topics
	      consumer.subscribe(topicsList);

	      System.out.println("Subscribed to topic " + topicsList.get(0));

	      while (true) {
	    	 //Poll the Kafka server every 100 milliseconds
	    	 //and fetch log messages from there
	         ConsumerRecords<String, String> records = consumer.poll(100);
	         for (ConsumerRecord<String, String> record : records)
	         {
	        	//Print the offset within the Kafka partition where the log message is stored, followed by its value
	        	 System.out.println(record.offset()+"-"+record.value());

	         }
	      }
	}
}

Kafka Consumer Output

1-Hi
2-This is kafka Producer Test.
3-Now will check for Response.
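
To cross-check what the producer actually wrote to the topic, the console consumer that ships with Kafka can be pointed at the same topic (run from the Kafka installation directory; the host is illustrative):

bin/kafka-console-consumer.sh --bootstrap-server kafkahost:9092 --topic Topic1 --from-beginning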

Let me know your thoughts on this post.

Happy Learning!!!

Read More on Kafka

Integration

Integrate Filebeat, Kafka, Logstash, Elasticsearch and Kibana