Category Archives: JSON

How to MASK XML Confidential/Personal Data : JAVA

Here you will see all the steps to mask confidential information in XML such as credit card number, CVV, expiry date, SSN, password etc., so that it prints in masked form (for example ******) and unauthorized users cannot misuse others' information.

Pre-Requisite

Use the below library or add it to your pom.xml.


<dependency>
    <groupId>org.jsoup</groupId>
    <artifactId>jsoup</artifactId>
    <version>1.10.2</version>
</dependency>

Here is a sample XML file, UserAccountDetail.xml, where we need to mask CardNumber, CVV and ExpDate.



<AccountList>
    <Account>
        <id>E001</id>
        <FirstName>Saurabh</FirstName>
        <LastName>Gupta</LastName>
        <AddressDetail>
            <AddressLine1>Noida City Center</AddressLine1>
            <City>Noida</City>
            <State>UP</State>
            <Pincode>201301</Pincode>
            <Contry>India</Contry>
        </AddressDetail>
        <CreditCardDetail>
            <CardNumber>1233454565676567</CardNumber>
            <CVV>456</CVV>
            <ExpDate>12/90</ExpDate>
        </CreditCardDetail>
    </Account>
    <Account>
        <id>E002</id>
        <FirstName>Ankur</FirstName>
        <LastName>Mehrotra</LastName>
        <AddressDetail>
            <AddressLine1>New Delhi Metro Station</AddressLine1>
            <City>New Delhi</City>
            <State>UP</State>
            <Pincode>210345</Pincode>
            <Contry>India</Contry>
        </AddressDetail>
        <CreditCardDetail>
            <CardNumber>8967452312123456</CardNumber>
            <CVV>876</CVV>
            <ExpDate>09/83</ExpDate>
        </CreditCardDetail>
    </Account>
    <Account>
        <id>E003</id>
        <FirstName>Shailesh</FirstName>
        <LastName>Nagar</LastName>
        <AddressDetail>
            <AddressLine1>Dwarka Metro Station</AddressLine1>
            <City>Delhi</City>
            <State>Delhi</State>
            <Pincode>345876</Pincode>
            <Contry>India</Contry>
        </AddressDetail>
        <CreditCardDetail>
            <CardNumber>9078563412345678</CardNumber>
            <CVV>986</CVV>
            <ExpDate>08/99</ExpDate>
        </CreditCardDetail>
    </Account>
</AccountList>

Below is the Java code to mask the above XML. In this code CardNumber is masked partially, showing only the last four digits and hiding the rest, while the CVV and ExpDate digits are masked completely.

package com.mask.xml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Parser;
import org.jsoup.select.Elements;

public class MaskXML {

	public static void main(String[] args) {

		try {
			FileInputStream inputStream = new FileInputStream(
					new File("D:\\Saurabh Gupta\\Workspace\\JavaTestExamples\\src\\main\\resources\\UserAccountDetail.xml"));
			// Parse as XML so jsoup keeps the tag structure intact
			Document doc = Jsoup.parse(inputStream, "UTF-8", "", Parser.xmlParser());
			// Tags whose digits are masked completely
			Elements toMaskTagCompletely = doc.select("Pincode,ExpDate,CVV");
			// Tags masked partially (only the last four digits are kept)
			Elements toMaskTagPartially = doc.select("CardNumber");
			for (Element element : toMaskTagCompletely) {
				element.text(replaceDigits(element.text()));
			}
			for (Element element : toMaskTagPartially) {
				// Assumes a 16-digit card number; shows only the last four digits
				element.text("XXXXXXXXXXXX" + element.text().substring(element.text().length() - 4));
			}
			System.out.println(doc.toString());
		} catch (FileNotFoundException ex) {
			ex.printStackTrace();
		} catch (IOException ex) {
			ex.printStackTrace();
		}

	}

	private static String replaceDigits(String text) {
		StringBuffer buffer = new StringBuffer(text.length());
		Pattern pattern = Pattern.compile("\\d");
		Matcher matcher = pattern.matcher(text);
		while (matcher.find()) {
			matcher.appendReplacement(buffer, "X");
		}
		return buffer.toString();
	}

}

Result : Masked XML


<?xml version="1.0" encoding="UTF-8"?> 
<AccountList> 
 <Account> 
  <id>
   E001
  </id> 
  <FirstName>
   Saurabh
  </FirstName> 
  <LastName>
   Gupta
  </LastName> 
  <AddressDetail> 
   <AddressLine1>
    Noida City Center
   </AddressLine1> 
   <City>
    Noida
   </City> 
   <State>
    UP
   </State> 
   <Pincode>
    XXXXXX
   </Pincode> 
   <Contry>
    India
   </Contry> 
  </AddressDetail> 
  <CreditCardDetail> 
   <CardNumber>
    XXXXXXXXXXXX6567
   </CardNumber> 
   <CVV>
    XXX
   </CVV> 
   <ExpDate>
    XX/XX
   </ExpDate> 
  </CreditCardDetail> 
 </Account> 
 <Account> 
  <id>
   E002
  </id> 
  <FirstName>
   Ankur
  </FirstName> 
  <LastName>
   Mehrotra
  </LastName> 
  <AddressDetail> 
   <AddressLine1>
    New Delhi Metro Station
   </AddressLine1> 
   <City>
    New Delhi
   </City> 
   <State>
    UP
   </State> 
   <Pincode>
    XXXXXX
   </Pincode> 
   <Contry>
    India
   </Contry> 
  </AddressDetail> 
  <CreditCardDetail> 
   <CardNumber>
    XXXXXXXXXXXX3456
   </CardNumber> 
   <CVV>
    XXX
   </CVV> 
   <ExpDate>
    XX/XX
   </ExpDate> 
  </CreditCardDetail> 
 </Account> 
 <Account> 
  <id>
   E003
  </id> 
  <FirstName>
   Shailesh
  </FirstName> 
  <LastName>
   Nagar
  </LastName> 
  <AddressDetail> 
   <AddressLine1>
    Dwarka Metro Station
   </AddressLine1> 
   <City>
    Delhi
   </City> 
   <State>
    Delhi
   </State> 
   <Pincode>
    XXXXXX
   </Pincode> 
   <Contry>
    India
   </Contry> 
  </AddressDetail> 
  <CreditCardDetail> 
   <CardNumber>
    XXXXXXXXXXXX5678
   </CardNumber> 
   <CVV>
    XXX
   </CVV> 
   <ExpDate>
    XX/XX
   </ExpDate> 
  </CreditCardDetail> 
 </Account> 
</AccountList>

Summary

  • Example to mask XML data.
  • Shared the API and source code for masking XML with minimal code.
  • Shared code to mask complete and partial text data for credit card, SSN, CVV etc.

Related Posts

Below are some more ways to mask different types of data like XML, JSON and printed objects before logging, sending to a page or transferring over a network.

Log4j2: How to Mask Logs Personal/Confidential/SPI Information

How to Mask JSON Confidential/Personal Information in logs :JAVA

How to mask JAVA Object confidential/personal information in logs while Printing

 


YAML and JAVA Configuration

Below are tools which provide YAML support for JAVA APIs. Here I will focus only on SnakeYAML, which is widely used in industry.

  • JvYaml
  • SnakeYAML
  • YamlBeans
  • JYaml
  • Camel

SnakeYAML Configuration

JAVA


<dependency>
    <groupId>org.yaml</groupId>
    <artifactId>snakeyaml</artifactId>
    <version>1.20-SNAPSHOT</version>
</dependency>

Android


<dependency>
  <groupId>org.yaml</groupId>
  <artifactId>snakeyaml</artifactId>
  <version>1.20-SNAPSHOT</version>
  <classifier>android</classifier>
</dependency>

For any operation with the SnakeYAML APIs you have to use the below steps:

Package:

org.yaml.snakeyaml.Yaml

Initialization:

Yaml yaml = new Yaml();

Loading YAML:

  • yaml.load(String) accepts a String.
  • yaml.load(InputStream) accepts an InputStream.

yaml.load(InputStream) detects the encoding by checking the BOM (byte order mark) sequence at the beginning of the stream. If no BOM is present, UTF-8 encoding is assumed, as the sketch below shows.
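
As a minimal sketch of both load variants (the config.yaml file name is just an assumption for illustration):

import java.io.FileInputStream;
import java.io.InputStream;

import org.yaml.snakeyaml.Yaml;

public class SnakeYamlLoadSketch {
	public static void main(String[] args) throws Exception {
		Yaml yaml = new Yaml();

		// yaml.load(String): parse YAML held in a String
		Object fromString = yaml.load("name: Saurabh\nskill: JAVA");
		System.out.println(fromString); // {name=Saurabh, skill=JAVA}

		// yaml.load(InputStream): encoding detected from the BOM, UTF-8 otherwise
		try (InputStream input = new FileInputStream("config.yaml")) { // hypothetical file
			Object fromStream = yaml.load(input);
			System.out.println(fromStream);
		}
	}
}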

More

To know more about YAML syntax, configuration with Java and other supporting languages, frameworks and tools, sample configuration files, and JSON/YAML conversion, follow the below YAML Tutorials; for YAML-related exceptions, follow YAML Issues.

JAVA : How to convert YAML Documents to JSON List?

Here is code to convert YAML documents to JSON objects using the Jackson and SnakeYAML APIs. Jackson also provides YAML support.

Pre-Requisite



<dependencies>
    <!-- Jackson JSON Processor -->
    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
        <version>2.4.1</version>
    </dependency>
    <!-- For YAML -->
    <dependency>
        <groupId>org.yaml</groupId>
        <artifactId>snakeyaml</artifactId>
        <version>1.21</version>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.dataformat</groupId>
        <artifactId>jackson-dataformat-yaml</artifactId>
        <version>2.1.2</version>
    </dependency>
</dependencies>

Sample YAML Documents File


---
# My personal record
name: Saurabh Kumar Gupta
Title: Sr. Project Lead
skill: JAVA/J2EE
employed: True
domains:
    - Telecom
    - Finance
    - Banking
    - Healthcare
languages:
    ELK: Medium
    JAVA: Expertize
    Scripting: Comfortable
education: |
    MCA
    B.Sc
    Diploma

---
# Gaurav personal record
name: Gaurav Gupta
Title: Project Lead
skill: ELK
employed: True
domains:
    - Telecom
    - Banking
    - Healthcare
languages:
    ELK: Medium
    JAVA: Expertize
    Scripting: Comfortable
    Bigdata: Expertize
education: |
    MCA
    B.Sc

Code to Convert YAML documents to JSON Objects

package com.fiot.examples.yaml;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Iterator;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;

public class ConvertYAMLObjectsToJSON {
	public static void main(String[] args) {
		try (InputStream input = new FileInputStream(new File(
				"F:\\Workspace-Blog\\TestExamples\\src\\main\\resources\\YAMLDocument2.yaml"))) {
			Yaml yaml = new Yaml(new SafeConstructor());
			// loadAll returns one parsed object per YAML document in the stream
			Iterator<Object> iterator = yaml.loadAll(input).iterator();
			while (iterator.hasNext()) {
				System.out.println(iterator.next());
			}
		} catch (Throwable e) {
			System.out.println("ERROR: " + e.getMessage());
		}
	}
}

Output


{name=Saurabh Kumar Gupta, Title=Sr. Project Lead, skill=JAVA/J2EE, employed=true, domains=[Telecom, Finance, Banking, Healthcare], languages={ELK=Medium, JAVA=Expertize, Scripting=Comfortable}, education=MCA
B.Sc
Diploma
}
{name=Gaurav Gupta, Title=Project Lead, skill=ELK, employed=true, domains=[Telecom, Banking, Healthcare], languages={ELK=Medium, JAVA=Expertize, Scripting=Comfortable, Bigdata=Expertize}, education=MCA
B.Sc}

More

To know more about YAML syntax, configuration with Java and other supporting languages, frameworks and tools, sample configuration files, and JSON/YAML conversion, follow the below YAML Tutorials; for YAML-related exceptions, follow YAML Issues.

JAVA : How to convert YAML To JSON?

Here is code to convert a YAML document to JSON using the Jackson and SnakeYAML APIs. Jackson also provides YAML support.

Pre-Requisite


<dependencies>
    <!-- Jackson JSON Processor -->
    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
        <version>2.4.1</version>
    </dependency>
    <!-- For YAML -->
    <dependency>
        <groupId>org.yaml</groupId>
        <artifactId>snakeyaml</artifactId>
        <version>1.21</version>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.dataformat</groupId>
        <artifactId>jackson-dataformat-yaml</artifactId>
        <version>2.1.2</version>
    </dependency>
</dependencies>

Sample YAML File


---
# My personal record
name: Saurabh Kumar Gupta
Title: Sr. Project Lead
skill: JAVA/J2EE
employed: True
domains:
    - Telecom
    - Finance
    - Banking
    - Healthcare
languages:
    ELK: Medium
    JAVA: Expertize
    Scripting: Comfortable
education: |
    MCA
    B.Sc
    Diploma
...

Code to convert YAML to JSON data

package com.fiot.examples.yaml;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;

public class ConvertYAMLToJSON {
	public static void main(String[] args) {
		String content = "";
		try {
			content = new String(Files.readAllBytes(Paths.get(
					"F:\\Workspace-Blog\\TestExamples\\src\\main\\resources\\YAMLDocument.yaml")));
			System.out.println("*********Content from YAML File ****************");
			System.out.println(content);
			String json = convertYamlToJson(content);
			System.out.println("*********Cnverted JSON from YAML File ****************");
			System.out.println(json);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	private static String convertYamlToJson(String yaml) {
		try {
			ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
			Object obj = yamlReader.readValue(yaml, Object.class);
			ObjectMapper jsonWriter = new ObjectMapper();
			return jsonWriter.writerWithDefaultPrettyPrinter().writeValueAsString(obj);
		} catch (JsonProcessingException ex) {
			ex.printStackTrace();
		} catch (IOException ex) {
			ex.printStackTrace();
		}
		return null;
	}
}

Output


*********Content from YAML File ****************
---
# My personal record
name: Saurabh Kumar Gupta
Title: Sr. Project Lead
skill: JAVA/J2EE
employed: True
domains:
    - Telecom
    - Finance
    - Banking
    - Healthcare
languages:
    ELK: Medium
    JAVA: Expertize
    Scripting: Comfortable
education: |
    MCA
    B.Sc
    Diploma
...
*********Converted JSON from YAML File ****************
{
  "name" : "Saurabh Kumar Gupta",
  "Title" : "Sr. Project Lead",
  "skill" : "JAVA/J2EE",
  "employed" : true,
  "domains" : [ "Telecom", "Finance", "Banking", "Healthcare" ],
  "languages" : {
    "ELK" : "Medium",
    "JAVA" : "Expertize",
    "Scripting" : "Comfortable"
  },
  "education" : "MCA\nB.Sc\nDiploma\n"
}

Below are some online tools to convert YAML/YML to JSON.
https://codebeautify.org/yaml-to-json-xml-csv
http://convertjson.com/yaml-to-json.htm

More

To know more about YAML syntax, configuration with Java and other supporting languages, frameworks and tools, sample configuration files, and JSON/YAML conversion, follow the below YAML Tutorials; for YAML-related exceptions, follow YAML Issues.

Difference between YAML and JSON

 

“YAML is a superset of JSON”

Below is a comparison between YAML and JSON, both conceptually and in terms of writing differences.

YAML vs JSON

  • YAML is best suited for configuration, while JSON is better as a serialization format or for serving up data for your APIs.
  • YAML is by no means a replacement for JSON. You should use the data format that makes the most sense for what you are trying to accomplish.

YAML Advantage

  • YAML has a couple of big advantages, including the ability to self-reference, support for complex datatypes, embedded block literals, comments, and more.
  • Write your configuration files in YAML format where you have the opportunity; it is designed to be readable and easily editable by humans.

JSON Disadvantage

  • JSON is designed for simple machine readability and intentionally lacks features that support hand-editing.
  • JSON doesn’t support comments – this is intentionally left out of the JSON specification because it's not what the format was designed for.

JSON vs YAML

  • JSON is well suited as a serialization format for data interchange between APIs over a network.
  • JSON ships with a far simpler specification than YAML.
  • Learning JSON is faster in comparison to YAML because its feature set is not nearly as large.
  • YAML is a superset of JSON, which means you can parse JSON with a YAML parser (see the sketch below).
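
Because JSON is valid YAML, a YAML parser such as SnakeYAML can read a JSON string directly; a minimal sketch:

import org.yaml.snakeyaml.Yaml;

public class ParseJsonWithYamlParser {
	public static void main(String[] args) {
		// A JSON document is also a valid YAML document, so the parser accepts it as-is
		String json = "{\"name\": \"Saurabh\", \"skills\": [\"JAVA\", \"ELK\"]}";
		Object data = new Yaml().load(json);
		System.out.println(data); // {name=Saurabh, skills=[JAVA, ELK]}
	}
}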

JSON Advantage

  • JSON is best for data interchange.

Disadvantage of YAML

  • YAML parsers are younger and known to be less secure.
  • YAML is mainly designed for configuration; when used for data interchange, many of YAML's features lose their appeal.

Syntax Difference between YAML and JSON

Below are some syntax differences between YAML and JSON when writing files:

JSON Syntax

  • JSON is a subset of the JavaScript object notation syntax.
  • JSON data is stored in name/value pairs.
  • JSON records are separated by commas.
  • JSON field names and strings are wrapped in double quotes.

YAML Syntax

  • YAML stands for "YAML Ain't Markup Language" and is a superset of JSON – you can convert YAML to JSON.
  • YAML documents begin with '---', marking the start of the document.
  • YAML documents end with '...', but it's optional.
  • YAML key/value pairs are separated by a colon.
  • YAML lists begin with a hyphen.

More

To know more about YAML syntax, configuration with Java and other supporting languages, frameworks and tools, sample configuration files, and JSON/YAML conversion, follow the below YAML Tutorials; for YAML-related exceptions, follow YAML Issues.

 

How to exclude fields from JSON while parsing by GSON/GsonBuilder?

In the previous examples below, we discussed converting a JAVA object to/from JSON, pretty formatting, and null serialization of fields for JSON conversion. Here we discuss how to exclude fields/objects while converting to JSON.

How to convert Java object to / from JSON by (GSON)

How to do JSON pretty formatting and Null Serialization (GSON/GsonBuilder)

GSON provides two ways to exclude fields from JSON via GsonBuilder:

  • @Expose Annotation
  • Custom Annotation

@Expose Annotation

Using the @Expose annotation together with the excludeFieldsWithoutExposeAnnotation() method on the GsonBuilder ignores all fields except the ones that have been exposed using @Expose, as the sketch below shows.
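
A minimal sketch of this mechanism (the User class below is a hypothetical example, not part of this post's model classes):

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;

public class ExposeAnnotationSketch {
	static class User {
		@Expose
		String name = "Saurabh"; // serialized: marked with @Expose
		String password = "secret"; // skipped: no @Expose annotation
	}

	public static void main(String[] args) {
		Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
		System.out.println(gson.toJson(new User())); // {"name":"Saurabh"}
	}
}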

Custom Annotation

By defining a custom annotation and implementing the ExclusionStrategy interface to skip fields carrying that annotation, the GsonBuilder can ignore/exclude fields from JSON via the below methods.

public boolean shouldSkipField(FieldAttributes f);

public boolean shouldSkipClass(Class<?> clazz);

Pre-Requisite:

  • JAVA 8
  • Maven 3
  • GSON Jar as below.

GSON Dependency:

 <dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.8.2</version>
 </dependency>

In the below example the fields middleName, educationDetail and experienceDetail will not serialize because they do not have the @Expose annotation, and the field country will also not serialize because it has the @Country annotation. Here you will see how to exclude all of these from serialization.
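
The code below references a custom @Country annotation whose definition is not shown in this post; a minimal sketch of such a marker annotation (an assumption, not the original source) could look like this:

package model;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Marker annotation; RUNTIME retention is required so that Gson's
// FieldAttributes.getAnnotation(Country.class) can see it during serialization
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Country {
}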

package gsonexamples;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import model.Country;
import model.Employee;
import util.CustomExclusionStrategy;

class GsonConvertJavaObjectToJSONExclusion {

	public static void main(String[] args) {
		Employee employee = getJavaObject();
		// CustomExclusionStrategy will exclude fields annotated with @Country;
		// excludeFieldsWithoutExposeAnnotation() allows only those fields that
		// have been exposed using the @Expose annotation.
		Gson gson = new GsonBuilder()
				.setPrettyPrinting()
				.serializeNulls()
				.setExclusionStrategies(new CustomExclusionStrategy(Country.class))
				.excludeFieldsWithoutExposeAnnotation()
				.create();

		System.out.println(gson.toJson(employee));

	}

	public static  Employee getJavaObject()
	{
		Employee employee = new Employee("C123", "Saurabh", "Kumar", "Gupta", "Tech Lead", 50000,
				"Alpharetta,GA", "Dallas, TX", "1231231230", null, null);
		return employee;
	}

}

package util;

import com.google.gson.ExclusionStrategy;
import com.google.gson.FieldAttributes;

import model.Country;

/**
 * This class uses a custom exclusion policy. We want to ignore all fields that
 * have been annotated with the Country annotation. Note that we can also ignore
 * fields based on name or type. The same type of policy can be applied to any class.
 */
public class CustomExclusionStrategy implements ExclusionStrategy {

	private Class<?> classToExclude;

	public CustomExclusionStrategy(Class<?> classToExclude) {
		this.classToExclude = classToExclude;
	}

	// This method is called for all fields. If the method returns true the
	// field is excluded from serialization.
	@Override
	public boolean shouldSkipField(FieldAttributes f) {
		return f.getAnnotation(Country.class) != null;
	}

	// This method is called for all classes. If the method returns true the
	// class is excluded.
	@Override
	public boolean shouldSkipClass(Class<?> clazz) {
		return clazz.equals(classToExclude);
	}

}
package model;

import java.util.List;
import java.util.Set;

import com.google.gson.annotations.Expose;

public class Employee {
	@Expose
	private String employeeId;
	@Expose
	private String firstName;
	private String middleName;
	@Expose
	private String lastName;
	@Expose
	private String designation;
	private int salary;
	@Expose
	private String permanentAddress;
	private String mailingAddress;
	@Country
	private String country;
	@Expose
	private String mobile;
	private Set<Education> educationDetail;
	private List<Experience> expericeDetail;

	@Override
	public String toString() {
		return "Employee [employeeId=" + employeeId + ", firstName=" + firstName + ", middleName=" + middleName
				+ ", lastName=" + lastName + ", designation=" + designation + ", salary=" + salary
				+ ", permanentAddress=" + permanentAddress + ", mailingAddress=" + mailingAddress + ", mobile=" + mobile
				+ ", educationDetail=" + educationDetail + ", expericeDetail=" + expericeDetail + "]";
	}

	public Employee(String employeeId, String firstName, String middleName, String lastName, String designation,
			int salary, String permanentAddress, String mailingAddress, String mobile, Set<Education> educationDetail,
			List<Experience> expericeDetail) {
		super();
		this.employeeId = employeeId;
		this.firstName = firstName;
		this.middleName = middleName;
		this.lastName = lastName;
		this.designation = designation;
		this.salary = salary;
		this.permanentAddress = permanentAddress;
		this.mailingAddress = mailingAddress;
		this.mobile = mobile;
		this.educationDetail = educationDetail;
		this.expericeDetail = expericeDetail;
	}
//Getter and setter of fields
	}

package model;

public class Education {
private String schoolOrCollegeName;
private String standard;
private String stream;
private double percentage;

@Override
public String toString() {
	return "Education [schoolOrCollegeName=" + schoolOrCollegeName + ", standard=" + standard + ", stream=" + stream
			+ ", percentage=" + percentage + "]";
}
public Education(String schoolOrCollegeName, String standard, String stream, double percentage) {
	super();
	this.schoolOrCollegeName = schoolOrCollegeName;
	this.standard = standard;
	this.stream = stream;
	this.percentage = percentage;
}
}
package model;

import java.util.Date;

public class Experience {
private String companyName;
private String designation;
private Date startDate;
private Date endDate;
private double salary;
public Experience(String companyName, String designation, Date startDate, Date endDate, double salary) {
	super();
	this.companyName = companyName;
	this.designation = designation;
	this.startDate = startDate;
	this.endDate = endDate;
	this.salary = salary;
}
@Override
public String toString() {
	return "Experience [companyName=" + companyName + ", designation=" + designation + ", startDate=" + startDate
			+ ", endDate=" + endDate + ", salary=" + salary + "]";
}
//getter and setter
}

Output

{
  "employeeId": "C123",
  "firstName": "Saurabh",
  "lastName": "Gupta",
  "designation": "Tech Lead",
  "permanentAddress": "Alpharetta,GA",
  "mobile": "1231231230"
}

In the above JSON output for the Employee class, the fields middleName, educationDetail and experienceDetail are not printed because the @Expose annotation was not given on them, and serialization is skipped for the field country because it has the @Country annotation, which CustomExclusionStrategy excludes.

The next example will discuss:

How to parse JSON data token by token by using GSON/JsonToken/JsonReader?

How to do JSON pretty formatting and Null Serialization (GSON/GsonBuilder)

The previous example used GSON's simple JAVA APIs to convert a JAVA object to/from JSON; here I will use GsonBuilder to pretty-print JSON and also serialize null object values.

How to convert Java object to / from JSON by (GSON)

Pre-Requisite:

  • JAVA 8
  • Maven 3
  • GSON Jar as below.

GSON Dependency:

 <dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.8.2</version>
 </dependency>

Convert JAVA object to/from JSON

package gsonexamples;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import model.Employee;

public class GsonConvertJavaToJSON {

	public static void main(String[] args) {
		//Create GSON Object
		//Gson gson = new Gson();
		Gson gson = new GsonBuilder().setPrettyPrinting().serializeNulls().create();
		Employee employee = getJavaObject();
		String json = gson.toJson(employee);

		System.out.println("Convert Java Object To JSON");
		System.out.println(json);

		System.out.println("Convert JSON To Java Object");
		employee = gson.fromJson(json, Employee.class);
		System.out.println(employee);

	}
	public static Employee getJavaObject()
	{
		Employee employee = new Employee("C123", "Saurabh", "Kumar", "Gupta", "Tech Lead", 50000,
				"Alpharetta,GA", "Dallas, TX", "1231231230", null, null);
		return employee;
	}

}

  • GsonBuilder().setPrettyPrinting(): generates a pretty-formatted JSON string.
  • GsonBuilder().serializeNulls(): serializes null values in the object.
  • GsonBuilder().create(): creates the Gson object used to convert a Java object to/from JSON.
package model;

import java.util.List;
import java.util.Set;

public class Employee {
private String employeeId;
private String firstName;
private String middleName;
private String lastName;
private String designation;
private int salary;
private String permanentAddress;
private String mailingAddress;
private String mobile;
private Set<Education> educationDetail;
private List<Experience> expericeDetail;

@Override
public String toString() {
	return "Employee [employeeId=" + employeeId + ", firstName=" + firstName + ", middleName=" + middleName
			+ ", lastName=" + lastName + ", designation=" + designation + ", salary=" + salary + ", permanentAddress="
			+ permanentAddress + ", mailingAddress=" + mailingAddress + ", mobile=" + mobile + ", educationDetail="
			+ educationDetail + ", expericeDetail=" + expericeDetail + "]";
}
public Employee(String employeeId, String firstName, String middleName, String lastName, String designation, int salary,
		String permanentAddress, String mailingAddress, String mobile, Set<Education> educationDetail,
		List<Experience> expericeDetail) {
	super();
	this.employeeId = employeeId;
	this.firstName = firstName;
	this.middleName = middleName;
	this.lastName = lastName;
	this.designation = designation;
	this.salary = salary;
	this.permanentAddress = permanentAddress;
	this.mailingAddress = mailingAddress;
	this.mobile = mobile;
	this.educationDetail = educationDetail;
	this.expericeDetail = expericeDetail;
}
//Getter and Setter
}

package model;

public class Education {
private String schoolOrCollegeName;
private String standard;
private String stream;
private double percentage;

@Override
public String toString() {
	return "Education [schoolOrCollegeName=" + schoolOrCollegeName + ", standard=" + standard + ", stream=" + stream
			+ ", percentage=" + percentage + "]";
}
public Education(String schoolOrCollegeName, String standard, String stream, double percentage) {
	super();
	this.schoolOrCollegeName = schoolOrCollegeName;
	this.standard = standard;
	this.stream = stream;
	this.percentage = percentage;
}
//getter and setter
}
package model;

import java.util.Date;

public class Experience {
private String companyName;
private String designation;
private Date startDate;
private Date endDate;
private double salary;
public Experience(String companyName, String designation, Date startDate, Date endDate, double salary) {
	super();
	this.companyName = companyName;
	this.designation = designation;
	this.startDate = startDate;
	this.endDate = endDate;
	this.salary = salary;
}
@Override
public String toString() {
	return "Experience [companyName=" + companyName + ", designation=" + designation + ", startDate=" + startDate
			+ ", endDate=" + endDate + ", salary=" + salary + "]";
}
//getter and setter
}

Output:

Convert Java Object To JSON
{
  "employeeId": "C123",
  "firstName": "Saurabh",
  "middleName": "Kumar",
  "lastName": "Gupta",
  "designation": "Tech Lead",
  "salary": 50000,
  "permanentAddress": "Alpharetta,GA",
  "mailingAddress": "Dallas, TX",
  "mobile": "1231231230",
  "educationDetail": null,
  "expericeDetail": null
}
Convert JSON To Java Object
Employee [employeeId=C123, firstName=Saurabh, middleName=Kumar, lastName=Gupta, designation=Tech Lead, salary=50000, permanentAddress=Alpharetta,GA, mailingAddress=Dallas, TX, mobile=1231231230, educationDetail=null, expericeDetail=null]

In the above output, the JSON is pretty formatted properly and null objects for education and experience are serialized. The next blog explains how to exclude fields while serializing/deserializing to/from JSON with GsonBuilder.

How to exclude fields from JSON while parsing by GSON/GsonBuilder?

How to convert Java object to / from JSON by (GSON)

GSON comes with simple JAVA APIs toJson()/fromJson() to convert a JAVA object to/from JSON. Below is a very simple example to convert an Employee Java object to JSON and vice versa. To know more about GSON, follow the link GSON Introduction.

Pre-Requisite:

  • JAVA 8
  • Maven 3
  • GSON Jar as below.

GSON Dependency:

 <dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.8.2</version>
 </dependency>

Convert JAVA object to/from JSON

package gsonexamples;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import model.Employee;

public class GsonConvertJavaToJSON {

	public static void main(String[] args) {
		//Create GSON Object
		Gson gson = new Gson();
		Employee employee = getJavaObject();
		String json = gson.toJson(employee);

		System.out.println("Convert Java Object To JSON");
		System.out.println(json);

		System.out.println("Convert JSON To Java Object");
		employee = gson.fromJson(json, Employee.class);
		System.out.println(employee);

	}
	public static Employee getJavaObject()
	{
		Employee employee = new Employee("C123", "Saurabh", "Kumar", "Gupta", "Tech Lead", 50000,
				"Alpharetta,GA", "Dallas, TX", "1231231230", null, null);
		return employee;
	}

}

package model;

import java.util.List;
import java.util.Set;

public class Employee {
private String employeeId;
private String firstName;
private String middleName;
private String lastName;
private String designation;
private int salary;
private String permanentAddress;
private String mailingAddress;
private String mobile;
private Set<Education> educationDetail;
private List<Experience> expericeDetail;

@Override
public String toString() {
	return "Employee [employeeId=" + employeeId + ", firstName=" + firstName + ", middleName=" + middleName
			+ ", lastName=" + lastName + ", designation=" + designation + ", salary=" + salary + ", permanentAddress="
			+ permanentAddress + ", mailingAddress=" + mailingAddress + ", mobile=" + mobile + ", educationDetail="
			+ educationDetail + ", expericeDetail=" + expericeDetail + "]";
}
public Employee(String employeeId, String firstName, String middleName, String lastName, String designation, int salary,
		String permanentAddress, String mailingAddress, String mobile, Set<Education> educationDetail,
		List<Experience> expericeDetail) {
	super();
	this.employeeId = employeeId;
	this.firstName = firstName;
	this.middleName = middleName;
	this.lastName = lastName;
	this.designation = designation;
	this.salary = salary;
	this.permanentAddress = permanentAddress;
	this.mailingAddress = mailingAddress;
	this.mobile = mobile;
	this.educationDetail = educationDetail;
	this.expericeDetail = expericeDetail;
}
//Getter and Setter
}

package model;

public class Education {
private String schoolOrCollegeName;
private String standard;
private String stream;
private double percentage;

@Override
public String toString() {
	return "Education [schoolOrCollegeName=" + schoolOrCollegeName + ", standard=" + standard + ", stream=" + stream
			+ ", percentage=" + percentage + "]";
}
public Education(String schoolOrCollegeName, String standard, String stream, double percentage) {
	super();
	this.schoolOrCollegeName = schoolOrCollegeName;
	this.standard = standard;
	this.stream = stream;
	this.percentage = percentage;
}
//getter and setter
}
package model;

import java.util.Date;

public class Experience {
private String companyName;
private String designation;
private Date startDate;
private Date endDate;
private double salary;
public Experience(String companyName, String designation, Date startDate, Date endDate, double salary) {
	super();
	this.companyName = companyName;
	this.designation = designation;
	this.startDate = startDate;
	this.endDate = endDate;
	this.salary = salary;
}
@Override
public String toString() {
	return "Experience [companyName=" + companyName + ", designation=" + designation + ", startDate=" + startDate
			+ ", endDate=" + endDate + ", salary=" + salary + "]";
}
//getter and setter
}

Output:

Convert Java Object To JSON

{"employeeId":"C123","firstName":"Saurabh","middleName":"Kumar","lastName":"Gupta","designation":"Tech Lead","salary":50000,"permanentAddress":"Alpharetta,GA","mailingAddress":"Dallas, TX","mobile":"1231231230"}

Convert JSON To Java Object

Employee [employeeId=C123, firstName=Saurabh, middleName=Kumar, lastName=Gupta, designation=Tech Lead, salary=50000, permanentAddress=Alpharetta,GA, mailingAddress=Dallas, TX, mobile=1231231230, educationDetail=null, expericeDetail=null]

In the above output, the JSON is not pretty formatted and null objects for education and experience are not serialized. The next blog explains how to format JSON and serialize null values by using GsonBuilder.

How to do JSON pretty formatting and Null Serialization (GSON/GsonBuilder)

GSON Introduction

Gson is an open source Java library to serialize and deserialize Java objects to/from JSON. Gson can work with arbitrary Java objects, including pre-existing objects for which you do not have the source code.

Initial Release : May 22,2008
Written in: Java
Developed By: Google
Stable Release : 2.8.2 on Sep 19, 2017
License: Apache License 2.0

Why GSON is Popular?

  • Provides simple methods toJson() and fromJson() to convert Java objects to JSON and vice versa.
  • Extensive support for Java Generics (see the sketch after this list).
  • Allows custom representation of objects.
  • Allows pre-existing unmodifiable objects to be converted to and from JSON.
  • Supports complex objects with generic types and deep inheritance.
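
As a minimal sketch of the Generics support mentioned above (the list content is hypothetical):

import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class GsonGenericsSketch {
	public static void main(String[] args) {
		Gson gson = new Gson();
		List<String> skills = Arrays.asList("JAVA", "ELK", "Kafka");

		String json = gson.toJson(skills);
		System.out.println(json); // ["JAVA","ELK","Kafka"]

		// TypeToken captures the full generic type so Gson can deserialize
		// back into List<String> instead of a raw List
		Type listType = new TypeToken<List<String>>() {}.getType();
		List<String> restored = gson.fromJson(json, listType);
		System.out.println(restored);
	}
}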

Configuration/Dependency

The below dependency is required to configure GSON in your application:

<dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.8.2</version>
</dependency>

How to Configure Filebeat, Kafka, Logstash Input , Elasticsearch Output and Kibana Dashboard

Filebeat, Kafka, Logstash, Elasticsearch and Kibana integration is used by big organizations where applications are deployed in production on hundreds/thousands of servers scattered across different locations, and analysis needs to be done on data from these servers in real time.

This integration helps mostly with log-level analysis, tracking issues and data anomalies, and alerting on events of a particular occurrence where accountability matters.

Together these technologies provide a scalable architecture that enhances systems while keeping them decoupled from each other.

Why these Technology?

Filebeat :

  • Lightweight agent for shipping logs.
  • Forwards and centralizes files and logs.
  • Robust (doesn't miss a single beat).

Kafka:

  • Open source distributed stream-processing and message-broker platform.
  • Processes stream data or transaction logs in real time.
  • Fault-tolerant, high-throughput, low-latency platform for dealing with real-time data feeds.

Logstash:

  • Open source, server-side data processing pipeline that accepts data from different sources simultaneously.
  • Parses, formats and transforms data and sends it to different outputs.

Elasticsearch:

  • Elasticsearch is open source, distributed and cross-platform.
  • Built on top of Lucene, which provides full-text search with NRT (near real-time) search results.
  • Supports RESTful search through the Elasticsearch REST API.

Kibana:

  • Open source.
  • Provides a window to view Elasticsearch data in the form of different charts and dashboards.
  • Provides easy searching and operations on data with respect to time intervals.
  • Dashboards can easily be embedded in any web application.

How Data flow works ?

In this integration, Filebeat is installed on all servers where your application is deployed; it reads and ships the latest log changes from these servers to the Kafka topic configured for this application.

Logstash subscribes to log lines from the Kafka topic, parses these lines, makes relevant changes (formatting, excluding and including fields), and then sends this processed data to Elasticsearch indexes as a centralized location for data from the different servers.

Kibana is linked with the Elasticsearch indexes, which helps to do analysis via search, charts and dashboards.

[Diagram: FKLEK Integration]

Design Architecture

The below architecture assumes my application is deployed on three servers, each with a current log file named App1.log. Our goal is to read real-time data from these servers and do analysis on it.

[Diagram: FKLEK Arch Integration]

Steps to Installation, Configuration and Start

Here we will first install Kafka and Elasticsearch, which run individually; the rest of the tools will be installed and run in sequence to test the data flow. Initially, install everything on the same machine and test with sample data using the below steps; at the end of this post I will explain what changes to make according to your servers.

  • Kafka Installation, Configuration and Start
  • Elasticsearch Installation, Configuration and Start
  • Filebeat Installation, Configuration and Start
  • Logstash Installation, Configuration and Start
  • Kibana Installation, Start and Display

Pre-Requisite

The Filebeat, Logstash, Elasticsearch and Kibana versions should be compatible; it is better to use the latest versions from https://www.elastic.co/downloads.

  • Java 8+
  • Linux Server
  • Filebeat 5.XX
  • Kafka 2.11.XX
  • Logstash 5.XX
  • Elasticsearch 5.XX
  • Kibana 5.XX

Note: Make sure JDK 8 is installed and the JAVA_HOME environment variable points to the JDK 8 home directory wherever you want to install Elasticsearch, Logstash, Kibana and Kafka.

Windows: My Computer -> right click -> Properties -> Advanced System Settings -> System Variables

[Screenshot: Set JAVA_HOME]

Linux: Go to your home directory/sudo directory and add the line below:

export JAVA_HOME=/opt/app/facingissuesonit/jdk1.8.0_66

Sample Data

For testing we will use the sample log lines below, which include debug lines as well as a stacktrace; the grok parsing in this example is designed for them. For real-time testing with actual data you can point to your server log files, but you will have to modify the grok pattern in the Logstash configuration accordingly.

2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}
2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}
2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}
java.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist

	at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)
	at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)
2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}

Create an App1.log file on the same machine where Filebeat is to be installed and copy the above log lines into it.

Kafka Installation , Configuration and Start

Download the latest version of Kafka from the below link and use the command to untar and install it on a Linux server; on Windows, just unzip the downloaded file.

Download Link : https://kafka.apache.org/downloads

tar -zxvf kafka_2.11-0.10.0.0.tgz

For more configuration and start options follow Setup Kafka Cluster for Single Server/Broker

After downloading and untarring/unzipping, it will have the below files and directory structure.

ls -l
drwxr-xr-x  3 facingissuesonit Saurabh   4096 Apr  3 05:18 bin
drwxr-xr-x  2 facingissuesonit Saurabh   4096 May  8 11:05 config
drwxr-xr-x 74 facingissuesonit Saurabh   4096 May 27 20:00 kafka-logs
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr  3 05:17 libs
-rw-r--r--  1 facingissuesonit Saurabh  28824 Apr  3 05:17 LICENSE
drwxr-xr-x  2 facingissuesonit Saurabh 487424 May 27 20:00 logs
-rw-r--r--  1 facingissuesonit Saurabh    336 Apr  3 05:18 NOTICE
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr  3 05:17 site-docs

For more details about all these files, configuration options and other integration options, follow the Kafka Tutorial.

Make the below changes in the files config/zookeeper.properties and config/server.properties.

config/zookeeper.properties

clientPort=2181

config/server.properties

broker.id=0
listeners=PLAINTEXT://:9092
log.dir=/kafka-logs
zookeeper.connect=localhost:2181

Now Kafka is configured and ready to run. Use the below commands to start Zookeeper and the Kafka server as background processes.

screen -d -m bin/zookeeper-server-start.sh config/zookeeper.properties
screen -d -m bin/kafka-server-start.sh config/server.properties

To test that Kafka installed successfully, check the running Kafka process on Linux with "ps -ef|grep kafka", or follow the consumer/producer steps to/from a topic in Setup Kafka Cluster for Single Server/Broker.

Elasticsearch Installation,Configuration and Start

Download the latest version of Elasticsearch from the below link and use the command to untar and install it on a Linux server; on Windows, just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/elasticsearch

tar -zxvf elasticsearch-5.4.0.tar.gz

It will show the below files and directory structure for Elasticsearch.

drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 25 19:20 bin
drwxr-xr-x  3 facingissuesonit Saurabh   4096 May 13 17:27 config
drwxr-xr-x  3 facingissuesonit Saurabh   4096 Apr 24 15:56 data
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 17 10:55 lib
-rw-r--r--  1 facingissuesonit Saurabh  11358 Apr 17 10:50 LICENSE.txt
drwxr-xr-x  2 facingissuesonit Saurabh   4096 May 28 05:00 logs
drwxr-xr-x 12 facingissuesonit Saurabh   4096 Apr 17 10:55 modules
-rw-r--r--  1 facingissuesonit Saurabh 194187 Apr 17 10:55 NOTICE.txt
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 17 10:55 plugins
-rw-r--r--  1 facingissuesonit Saurabh   9540 Apr 17 10:50 README.textile

Before starting Elasticsearch, we need to make some basic changes in the config/elasticsearch.yml file for the cluster and node names. You can configure these based on your application or organization name.

cluster.name: FACING-ISSUE-IN-IT
node.name: TEST-NODE-1
#network.host: 0.0.0.0
http.port: 9200

Now the Elasticsearch configuration is ready and it is time to start Elasticsearch. We can use the below command to run it in the background.

screen -d -m bin/elasticsearch

To check that Elasticsearch started successfully, you can use the below URL in a browser to see the cluster status. You will get a result like below.

http://localhost:9200/_cluster/health?pretty

or as below if network.host is configured

http://elasticseverIp:9200/_cluster/health?pretty

Result :

{
  "cluster_name" : "FACING-ISSUE-IN-IT",
  "status" : "green",
  "timed_out" : false,
  "number_of_nodes" : 1,
  "number_of_data_nodes" : 1,
  "active_primary_shards" : 0,
  "active_shards" : 0,
  "relocating_shards" : 0,
  "initializing_shards" : 0,
  "unassigned_shards" : 0,
  "delayed_unassigned_shards" : 0,
  "number_of_pending_tasks" : 0,
  "number_of_in_flight_fetch" : 0,
  "task_max_waiting_in_queue_millis" : 0,
  "active_shards_percent_as_number" : 100.0
}

Filebeat Installation, Configuration and Start

Download the latest version of Filebeat from the below link and use the command to untar and install it on a Linux server; on Windows, just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/beats/filebeat

tar -zxvf filebeat-<version>.tar.gz

For more configuration and start options follow Filebeat Download,Installation and Start/Run

After downloading and untarring/unzipping, it will have the below files and directory structure.

ls -l
-rwxr-xr-x 1 facingissuesonit Saurabh 14908742 Jan 11 14:11 filebeat
-rw-r--r-- 1 facingissuesonit Saurabh    31964 Jan 11 14:11 filebeat.full.yml
-rw-r--r-- 1 facingissuesonit Saurabh     3040 Jan 11 14:11 filebeat.template-es2x.json
-rw-r--r-- 1 facingissuesonit Saurabh     2397 Jan 11 14:11 filebeat.template.json
-rw-r--r-- 1 facingissuesonit Saurabh     4196 Jan 11 14:11 filebeat.yml
-rw-r--r-- 1 facingissuesonit Saurabh      811 Jan 11 14:10 README.md
drwxr-xr-x 2 facingissuesonit Saurabh     4096 Jan 11 14:11 scripts

For more details about all these files, configuration options and other integration options, follow the Filebeat Tutorial.

Now Filebeat is installed and we need to make the below changes in the filebeat.full.yml file:

  • Inside the prospectors section, change paths to your log file location:
paths:
- /opt/app/facingissuesonit/App1.log
  • Comment out the default Elasticsearch output properties:
#output.elasticsearch:
#hosts: ["localhost:9200"]
  • Configure the multiline option as below so that all stacktrace lines which do not start with a date are treated as part of a single line:
multiline.pattern: ^\d
multiline.negate: true
multiline.match: after

To learn more about Filebeat multiline configuration, follow Filebeat Multiline Configuration Changes for Object, StackTrace and XML.

  • Inside the Kafka output section, update the hosts and topic properties. If Kafka is on the same machine use localhost, else update with the IP of the Kafka machine.
output.kafka:
 hosts: ["localhost:9092"]
 topic: APP-1-TOPIC

For more on logging configuration, follow the link Filebeat, Logging Configuration.

Now Filebeat is configured and ready to start with the below command; it will continuously read from the configured prospector for the file App1.log and publish log line events to Kafka. It will also create the topic APP-1-TOPIC in Kafka if it does not exist.

./filebeat -e -c filebeat.full.yml -d "publish"

On console it will display output as below for sample lines.

2017/05/28 00:24:27.991828 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
  "offset": 194,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991907 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
  "offset": 375,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991984 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
  "offset": 718,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991984 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.992Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}",
  "offset": 902,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}

You can see from the above Filebeat debug statements that published event 3 contains the multiline statement with the stacktrace exception, and each debug event has fields like the following.

@timestamp: the timestamp at which the data was shipped.

beat.hostname: the name of the Filebeat machine from which the data is shipping.

beat.version: the Filebeat version installed on the server, which helps with compatibility checks on the target end.

message: a log line from the log file, or multiple multiline log lines combined.

offset: the byte offset of the read position within the source file.

source: the file name from which the logs were read.

Now it is time to check whether data is published to the Kafka topic or not. For this, go to the below directory and you will see two files, an .index and a .log file, for maintaining the data offset and messages.

{Kafka_home}/kafka-logs/APP-1-TOPIC
          00000000000000000000.log
          00000000000000000000.index

Now your server log lines are in the Kafka topic, ready to be read and parsed by Logstash and sent to Elasticsearch for analysis/search on this data.

Logstash Installation, Configuration and Start

Download the latest version of Logstash from the below link and use the command to untar and install it on a Linux server; on Windows, just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/logstash

tar -zxvf logstash-5.4.0.tar.gz

It will show the below files and directory structure.

drwxr-xr-x 2 facingissuesonit Saurabh   4096 Apr 20 11:27 bin
-rw-r--r-- 1 facingissuesonit Saurabh 111569 Mar 22 23:49 CHANGELOG.md
drwxr-xr-x 2 facingissuesonit Saurabh   4096 Apr 20 11:27 config
-rw-r--r-- 1 facingissuesonit Saurabh   2249 Mar 22 23:49 CONTRIBUTORS
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 12:07 data
-rw-r--r-- 1 facingissuesonit Saurabh   3945 Mar 22 23:55 Gemfile
-rw-r--r-- 1 facingissuesonit Saurabh  21544 Mar 22 23:49 Gemfile.jruby-1.9.lock
drwxr-xr-x 5 facingissuesonit Saurabh   4096 Apr 20 11:27 lib
-rw-r--r-- 1 facingissuesonit Saurabh    589 Mar 22 23:49 LICENSE
drwxr-xr-x 2 facingissuesonit Saurabh   4096 May 21 00:00 logs
drwxr-xr-x 4 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-event-java
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-plugin-api
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-queue-jruby
-rw-r--r-- 1 facingissuesonit Saurabh  28114 Mar 22 23:56 NOTICE.TXT
drwxr-xr-x 4 facingissuesonit Saurabh   4096 Apr 20 11:27 vendor

Before starting Logstash, we need to create a configuration file to take input data from Kafka, parse it into the respective fields and send it to Elasticsearch. Create the file logstash-app1.conf in the Logstash bin directory with the below content.

bin/logstash-app1.conf

input {
     kafka {
            bootstrap_servers => 'localhost:9092'
            topics => ["APP-1-TOPIC"]
            codec => json {}
          }
}
filter
{
      #Parse log line
      grok
	{
	match => {"message" => "\A%{TIMESTAMP_ISO8601:timestamp}\s+%{LOGLEVEL:loglevel}\s+(?<logger>(?:[a-zA-Z0-9-]+\.)*[A-Za-z0-9$]+)\s+(-\s+)?(?=(?<msgnr>[A-Z]+[0-9]{4,5}))*%{DATA:message}({({[^}]+},?\s*)*})?\s*$(?<stacktrace>(?m:.*))?" }
	}

    #Remove unused fields
    #mutate { remove_field =>["beat","@version" ]}
}
output {
    #Output result sent to elasticsearch with a dynamically created daily index
    elasticsearch {
        index  => "app1-logs-%{+YYYY.MM.dd}"
        hosts => ["localhost:9200"]
        sniffing => false
  	}

     #Sysout logs
     stdout
       {
         codec => rubydebug
       }
}

To test your configuration file you can use the below command.


./logstash -t -f logstash-app1.conf

If we get the result OK from the above command, run the below command to start reading and parsing data from the Kafka topic.


./logstash -f logstash-app1.conf

To design your own grok pattern for your log line format, you can follow the below links, which help to generate patterns incrementally and also provide some sample grok patterns.

http://grokdebug.herokuapp.com and http://grokconstructor.appspot.com/

The Logstash console will show parsed data as below, and you can remove unused fields before storing in Elasticsearch by uncommenting the mutate section in the configuration file.

{
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 194,
      "loglevel" => "WARN",
        "logger" => "CreateSomethingActivationKey",
          "beat" => {
        "hostname" => "zlp0287k",
            "name" => "zlp0287k",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",
        "source" => "/opt/app/facingissuesonit/App1.log",
       "message" => [
        [0] "2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
        [1] "WhateverException for User 49-123-345678 "
    ],
          "type" => "log",
     "timestamp" => "2013-02-28 09:57:56,662"
}
{
         "msgnr" => "ERR1700",
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 375,
      "loglevel" => "INFO",
        "logger" => "LMLogger",
          "beat" => {
        "hostname" => "zlp0287k",
            "name" => "zlp0287k",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",
        "source" => "/opt/app/facingissuesonit/App1.log",
       "message" => [
        [0] "2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
        [1] "ERR1700 - u:null failures: 0  - Technical error "
    ],
          "type" => "log",
     "timestamp" => "2013-02-28 09:57:56,663"
}
{
        "offset" => 718,
        "logger" => "SomeCallLogger",
    "input_type" => "log",
       "message" => [
        [0] "2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
        [1] "ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  "
    ],
          "type" => "log",
         "msgnr" => "ESS10005",
    "@timestamp" => 2017-05-28T23:47:42.160Z,
    "stacktrace" => "\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
      "loglevel" => "ERROR",
          "beat" => {
        "hostname" => "zlp0287k",
            "name" => "zlp0287k",
         "version" => "5.1.2"
    },
      "@version" => "1",
     "timestamp" => "2013-02-28 09:57:56,668"
}
{
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 903,
      "loglevel" => "INFO",
        "logger" => "EntryFilter",
          "beat" => {
        "hostname" => "zlp0287k",
            "name" => "zlp0287k",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",
       "message" => [
        [0] "2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}\n",
        [1] "Fresh on request /portalservices/foobarwhatever "
    ],
          &quot;type&quot; =&gt; &quot;log&quot;,
     &quot;timestamp&quot; =&gt; &quot;2013-02-28 10:04:35,723&quot;
}

To test on elasticsearch end your data sent  successfully  you can use this url
http://localhost:9200/_cat/indices  on your browser and will display created index with current date.

yellow open app1-logs-2017.05.28                             Qjs6XWiFQw2zsiVs9Ks6sw 5 1         4     0  47.3kb  47.3kb

Kibana Installation, Configuration and Start

Download latest version of Kibana from below link and use command to untar and installation in Linux server or if window just unzip downloaded file.

Download Link : https://www.elastic.co/downloads/kibana

tar -zxvf kibana-5.4.0.tar.gz

It will show below files and directory structure for kibana.

ls -l
drwxr-xr-x   2 facingissuesonit Saurabh   4096 May 22 14:23 bin
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 25 18:58 config
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 25 11:54 data
-rw-r--r--   1 facingissuesonit Saurabh    562 Apr 17 12:04 LICENSE.txt
drwxr-xr-x   6 facingissuesonit Saurabh   4096 Apr 17 12:04 node
drwxr-xr-x 485 facingissuesonit Saurabh  20480 Apr 17 12:04 node_modules
-rw-r--r--   1 facingissuesonit Saurabh 660429 Apr 17 12:04 NOTICE.txt
drwxr-xr-x   3 facingissuesonit Saurabh   4096 Apr 17 12:04 optimize
-rw-r--r--   1 facingissuesonit Saurabh    702 Apr 17 12:04 package.json
drwxr-xr-x   2 facingissuesonit Saurabh   4096 May 22 12:29 plugins
-rw-r--r--   1 facingissuesonit Saurabh   4909 Apr 17 12:04 README.txt
drwxr-xr-x  10 facingissuesonit Saurabh   4096 Apr 17 12:04 src
drwxr-xr-x   3 facingissuesonit Saurabh   4096 Apr 17 12:04 ui_framework
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 17 12:04 webpackShims

Before going to start Kibana need to make some basic changes in config/kibana.yml file make below changes after uncomment these properties file.

server.port: 5601
server.host: localhost
elasticsearch.url: "http://localhost:9200"

Now we are ready with Kibana configuration and time start Kibana. We can use below command to run Kibana in background.

screen -d -m  /bin/kibana

Kibana take time to start and we can test it by using below url in browser

http://localhost:5601/

For checking this data  in Kibana open above url in browser go to management tab on left side menu -> Index Pattern -> Click on Add New

Enter Index name or pattern and time field name as in below screen  and click on create button.

Kibana index setting
Index Pattern Settings

Now go to Discover Tab and select index as app1-log* will display data as below.

kibana discover data

Now make below changes according to  your application specification .

Filebeat :

  • update prospector path to your log directory current file
  •  Move Kafka on different machine because Kafka will single location where receive shipped data from different servers. Update localhost with same IP of kafka server in Kafka output section of filebeat.full.yml file  for hosts properties.
  • Copy same filebeat setup on all servers from where you application deployed and need to read logs.
  • Start all filebeat instances on each Server.

Elasticsearch :

  • Uncomment network.host properties from elasticsearch.yml file for accessing by  IP address.

Logstash:

  • Update localhost in logstash-app1.conf file input section with Kafka machine IP.
  • change grok pattern in filter section according to your logs format. You can take help from below url for incrementally design. http://grokdebug.herokuapp.com and http://grokconstructor.appspot.com/
  • Update localhost output section for elasticsearch with IP if moving on different machine.

Kibana:

  • update localhost in kibana.yml file for elasticsearch.url properties with IP if kibana on different machine.

Conclusion :

In this tutorial considers below points :

  • Installation of Filebeat, Kafka, Logstash, Elasticsearch and Kibana.
  • Filebeat is configured to shipped logs to Kafka Message Broker.
  • Logstash configured to read logs line from Kafka topic , Parse and shipped to Elasticsearch.
  • Kibana show these Elasticsearch information in form of chart and dashboard to users for doing analysis.

Read More

To read more on Filebeat, Kafka, Elasticsearch  configurations follow the links and Logstash Configuration,Input Plugins, Filter Plugins, Output Plugins, Logstash Customization and related issues follow Logstash Tutorial and Logstash Issues.

Hope this blog was helpful for you.

Leave you feedback to enhance more on this topic so that make it more helpful for others.

Reference  :

 https://www.elastic.co/products

 

Integrate Filebeat, Kafka, Logstash, Elasticsearch and Kibana

Filebeat, Kafka, Logstash, Elasticsearch and Kibana integration suits large organizations where applications are deployed in production on hundreds or thousands of servers scattered across different locations, and the data from those servers needs to be analyzed in real time.

This integration is used mostly for log-level analysis, tracking issues and data anomalies, and alerting on particular events, wherever accountability matters.

Together these technologies provide a scalable architecture in which each component can be enhanced and scaled independently of the others.

Why these Technologies?

Filebeat:

  • Lightweight agent for shipping logs.
  • Forwards and centralizes files and logs.
  • Robust (does not miss a single beat).

Kafka:

  • Open-source distributed stream-processing and message-broker platform.
  • Processes stream data or transaction logs in real time.
  • Fault-tolerant, high-throughput, low-latency platform for handling real-time data feeds.

Logstash:

  • Open-source, server-side data-processing pipeline that accepts data from different sources simultaneously.
  • Parses, formats and transforms data, then sends it to different outputs.

Elasticsearch:

  • Open source, distributed and cross-platform.
  • Built on top of Lucene, which provides full-text search with NRT (near-real-time) results.
  • Supports RESTful search through the Elasticsearch REST API.

Kibana:

  • Open source.
  • Provides a window into Elasticsearch data in the form of different charts and dashboards.
  • Makes it easy to search and operate on the data with respect to time intervals.
  • Dashboards can easily be embedded in any web application.

How does the Data Flow work?

In this integration, Filebeat is installed on every server where the application is deployed; it reads the latest log changes on those servers and ships them to the Kafka topic configured for that application.

Logstash subscribes to the log lines from the Kafka topic, parses them, applies the relevant changes and formatting, excludes or includes fields, and then sends the processed data to Elasticsearch indexes, which act as the centralized location for data from the different servers.

Kibana is linked to the Elasticsearch indexes and helps with analysis through search, charts and dashboards.

FKLEK Integration

Design Architecture

The architecture below assumes the application is deployed on three servers, each with a current log file named App1.log. The goal is to read data from these servers in real time and analyze it.

FKLEK Arch Integration

Steps to Installation, Configuration and Start

Here we will first install Kafka and Elasticsearch and run them individually; the rest of the tools will be installed and run in sequence to test the data flow. Initially, install everything on the same machine and test with sample data using the steps below; at the end of this post you will find the changes needed to adapt the setup to your own servers.

  • Kafka Installation, Configuration and Start
  • Elasticsearch Installation, Configuration and Start
  • Filebeat Installation, Configuration and Start
  • Logstash Installation, Configuration and Start
  • Kibana Installation, Start and Display

Pre-Requisite

The Filebeat, Logstash, Elasticsearch and Kibana versions should be compatible with each other; it is best to use the latest versions from https://www.elastic.co/downloads.

  • Java 8+
  • Linux Server
  • Filebeat 6.XX
  • Kafka 2.11.XX
  • Logstash 6.XX
  • Elasticsearch 6.XX
  • Kibana 6.XX

Note: Make sure JDK 8 is installed and the JAVA_HOME environment variable points to the JDK 8 home directory on every machine where you want to install Elasticsearch, Logstash, Kibana or Kafka.

Windows : My Computer -> right click -> Properties -> Advanced System Settings -> System Variables

Set JAVA_HOME

Linux : Go to your home directory (or the appropriate profile file) and add a line as below.

export JAVA_HOME=/opt/app/facingissuesonit/jdk1.8.0_66
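You can then verify the setting with a quick sanity check (standard shell commands, added here for convenience):

echo $JAVA_HOME
java -version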

Sample Data

For testing we will use the sample log lines below, which include normal log statements as well as a stacktrace; the grok parsing in this example is designed for this format. For real-time testing with actual data you can point to your server log files, but you will have to modify the grok pattern in the Logstash configuration accordingly.

2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}
2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}
2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}
java.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist

	at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)
	at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)
2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}

Create an App1.log file on the machine where Filebeat will be installed and copy the above log lines into it.

Kafka Installation, Configuration and Start

Download the latest version of Kafka from the link below; on a Linux server use the command shown to untar it, or on Windows just unzip the downloaded file.

Download Link : https://kafka.apache.org/downloads

tar -zxvf kafka_2.11-0.10.0.0.tgz

For more configuration and start options follow Setup Kafka Cluster for Single Server/Broker.

After downloading and extracting, you will have the below files and directory structure.

ls -l
drwxr-xr-x  3 facingissuesonit Saurabh   4096 Apr  3 05:18 bin
drwxr-xr-x  2 facingissuesonit Saurabh   4096 May  8 11:05 config
drwxr-xr-x 74 facingissuesonit Saurabh   4096 May 27 20:00 kafka-logs
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr  3 05:17 libs
-rw-r--r--  1 facingissuesonit Saurabh  28824 Apr  3 05:17 LICENSE
drwxr-xr-x  2 facingissuesonit Saurabh 487424 May 27 20:00 logs
-rw-r--r--  1 facingissuesonit Saurabh    336 Apr  3 05:18 NOTICE
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr  3 05:17 site-docs

For more details about all these files, configuration options and other integration options, follow the Kafka Tutorial.

Make the below changes in config/zookeeper.properties and config/server.properties.

config/zookeeper.properties

clientPort=2181
config/server.properties:

broker.id=0
listeners=PLAINTEXT://:9092
log.dir=/kafka-logs
zookeeper.connect=localhost:2181

Now Kafka is configured and ready to run. Use the below commands to start ZooKeeper and the Kafka server as background processes.

screen -d -m bin/zookeeper-server-start.sh config/zookeeper.properties
screen -d -m bin/kafka-server-start.sh config/server.properties

To verify that Kafka installed and started successfully, you can check for the Kafka process on Linux with “ps -ef|grep kafka”, or follow the consumer/producer steps in Setup Kafka Cluster for Single Server/Broker.
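As an additional quick check (these are the standard Kafka command-line utilities shipped in the bin directory; the topic name here is just an example), you can list topics and push a test message through the broker:

bin/kafka-topics.sh --list --zookeeper localhost:2181
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TEST-TOPIC
bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TEST-TOPIC --from-beginning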

Elasticsearch Installation, Configuration and Start

Download the latest version of Elasticsearch from the link below; on a Linux server use the command shown to untar it, or on Windows just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/elasticsearch

tar -zxvf elasticsearch-5.4.0.tar.gz

It will show the below file and directory structure for Elasticsearch.

drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 25 19:20 bin
drwxr-xr-x  3 facingissuesonit Saurabh   4096 May 13 17:27 config
drwxr-xr-x  3 facingissuesonit Saurabh   4096 Apr 24 15:56 data
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 17 10:55 lib
-rw-r--r--  1 facingissuesonit Saurabh  11358 Apr 17 10:50 LICENSE.txt
drwxr-xr-x  2 facingissuesonit Saurabh   4096 May 28 05:00 logs
drwxr-xr-x 12 facingissuesonit Saurabh   4096 Apr 17 10:55 modules
-rw-r--r--  1 facingissuesonit Saurabh 194187 Apr 17 10:55 NOTICE.txt
drwxr-xr-x  2 facingissuesonit Saurabh   4096 Apr 17 10:55 plugins
-rw-r--r--  1 facingissuesonit Saurabh   9540 Apr 17 10:50 README.textile

Before starting Elasticsearch, make some basic changes in the config/elasticsearch.yml file for the cluster and node name. You can configure these based on your application or organization name.

cluster.name: FACING-ISSUE-IN-IT
node.name: TEST-NODE-1
#network.host: 0.0.0.0
http.port: 9200

Now the Elasticsearch configuration is ready and it is time to start it. We can use the below command, run from the Elasticsearch home directory, to start it in the background.

screen -d -m bin/elasticsearch

To check that Elasticsearch started successfully, open the below URL in a browser to see the cluster status. You will get a result like the one shown.

http://localhost:9200/_cluster/health?pretty

or, if network.host is configured:

http://elasticseverIp:9200/_cluster/health?pretty

Result :

{
  "cluster_name" : "FACING-ISSUE-IN-IT",
  "status" : "green",
  "timed_out" : false,
  "number_of_nodes" : 1,
  "number_of_data_nodes" : 1,
  "active_primary_shards" : 0,
  "active_shards" : 0,
  "relocating_shards" : 0,
  "initializing_shards" : 0,
  "unassigned_shards" : 0,
  "delayed_unassigned_shards" : 0,
  "number_of_pending_tasks" : 0,
  "number_of_in_flight_fetch" : 0,
  "task_max_waiting_in_queue_millis" : 0,
  "active_shards_percent_as_number" : 100.0
}

Filebeat Installation, Configuration and Start

Download the latest version of Filebeat from the link below; on a Linux server use the command shown to untar it, or on Windows just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/beats/filebeat

tar -zxvf filebeat-<version>.tar.gz

For more configuration and start options follow Filebeat Download, Installation and Start/Run.

After downloading and extracting, you will have the below files and directory structure.

ls -l
-rwxr-xr-x 1 facingissuesonit Saurabh 14908742 Jan 11 14:11 filebeat
-rw-r--r-- 1 facingissuesonit Saurabh    31964 Jan 11 14:11 filebeat.full.yml
-rw-r--r-- 1 facingissuesonit Saurabh     3040 Jan 11 14:11 filebeat.template-es2x.json
-rw-r--r-- 1 facingissuesonit Saurabh     2397 Jan 11 14:11 filebeat.template.json
-rw-r--r-- 1 facingissuesonit Saurabh     4196 Jan 11 14:11 filebeat.yml
-rw-r--r-- 1 facingissuesonit Saurabh      811 Jan 11 14:10 README.md
drwxr-xr-x 2 facingissuesonit Saurabh     4096 Jan 11 14:11 scripts

For more details about all these files, configuration options and other integration options, follow the Filebeat Tutorial.

Now Filebeat is installed; make the below changes in the filebeat.full.yml file (a consolidated sketch of the resulting file follows this list).

  • Inside the prospectors section, change paths to your log file location:
paths:
 - /opt/app/facingissuesonit/App1.log
  • Comment out the default Elasticsearch output properties:
#output.elasticsearch:
#hosts: ["localhost:9200"]
  • Configure the multiline option as below, so that all stacktrace lines that do not start with a date are treated as part of a single event:
multiline.pattern: ^\d
multiline.negate: true
multiline.match: after

To learn more about Filebeat multiline configuration, follow Filebeat Multiline Configuration Changes for Object, StackTrace and XML.

  • Inside the Kafka output section, update the hosts and topic properties. If Kafka is on the same machine use localhost, otherwise use the IP of the Kafka machine.
output.kafka:
 hosts: ["localhost:9092"]
 topic: APP-1-TOPIC
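Putting these edits together, the relevant parts of filebeat.full.yml would look roughly as below (a sketch assuming the Filebeat 5.x prospector layout used in this post's examples; all other settings keep their defaults):

filebeat.prospectors:
- input_type: log
  paths:
    - /opt/app/facingissuesonit/App1.log
  multiline.pattern: ^\d
  multiline.negate: true
  multiline.match: after

#output.elasticsearch:
#  hosts: ["localhost:9200"]

output.kafka:
  hosts: ["localhost:9092"]
  topic: APP-1-TOPIC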

For more on logging configuration, follow the link Filebeat, Logging Configuration.

Now Filebeat is configured and ready to start with the below command; it will continuously read the configured prospector file App1.log and publish log-line events to Kafka. It will also create the topic APP-1-TOPIC in Kafka if it does not already exist.

./filebeat -e -c filebeat.full.yml -d "publish"

The console will display output as below for the sample lines.


2017/05/28 00:24:27.991828 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
  "offset": 194,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991907 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
  "offset": 375,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991984 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.991Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
  "offset": 718,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}
2017/05/28 00:24:27.991984 client.go:184: DBG  Publish: {
  "@timestamp": "2017-05-28T00:24:22.992Z",
  "beat": {
    "hostname": "sg02870",
    "name": "sg02870",
    "version": "5.1.2"
  },
  "input_type": "log",
  "message": "2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}",
  "offset": 902,
  "source": "/opt/app/facingissuesonit/App1.log",
  "type": "log"
}

From the Filebeat debug statements above you can see that published event 3 contains a multiline message with a stacktrace, and that each event carries the following fields:

@timestamp: timestamp when the event was shipped.

beat.hostname : name of the machine from which Filebeat is shipping data.

beat.version: version of Filebeat installed on the server, which helps with compatibility checks on the target end.

message : log line from the log file, or multiple merged lines for multiline events.

offset: byte offset of the event within the source file.

source : name of the file from which the logs were read.

Now it is time to check whether the data was published to the Kafka topic. Go to the below directory and you will see two kinds of files, *.index and *.log, which maintain the data offsets and the messages themselves.

{Kafka_home}/kafka-logs/APP-1-TOPIC
          00000000000000000000.log
          00000000000000000000.index

Now your server log lines are in the Kafka topic, ready to be read and parsed by Logstash and sent to Elasticsearch for analysis and search.
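You can also peek at the shipped messages directly with the standard Kafka console consumer (an optional check, not part of the original flow):

bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic APP-1-TOPIC --from-beginning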

Logstash Installation, Configuration and Start

Download the latest version of Logstash from the link below; on a Linux server use the command shown to untar it, or on Windows just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/logstash

tar -zxvf logstash-5.4.0.tar.gz

It will show the below file and directory structure.

drwxr-xr-x 2 facingissuesonit Saurabh   4096 Apr 20 11:27 bin
-rw-r--r-- 1 facingissuesonit Saurabh 111569 Mar 22 23:49 CHANGELOG.md
drwxr-xr-x 2 facingissuesonit Saurabh   4096 Apr 20 11:27 config
-rw-r--r-- 1 facingissuesonit Saurabh   2249 Mar 22 23:49 CONTRIBUTORS
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 12:07 data
-rw-r--r-- 1 facingissuesonit Saurabh   3945 Mar 22 23:55 Gemfile
-rw-r--r-- 1 facingissuesonit Saurabh  21544 Mar 22 23:49 Gemfile.jruby-1.9.lock
drwxr-xr-x 5 facingissuesonit Saurabh   4096 Apr 20 11:27 lib
-rw-r--r-- 1 facingissuesonit Saurabh    589 Mar 22 23:49 LICENSE
drwxr-xr-x 2 facingissuesonit Saurabh   4096 May 21 00:00 logs
drwxr-xr-x 4 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-event-java
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-plugin-api
drwxr-xr-x 3 facingissuesonit Saurabh   4096 Apr 20 11:27 logstash-core-queue-jruby
-rw-r--r-- 1 facingissuesonit Saurabh  28114 Mar 22 23:56 NOTICE.TXT
drwxr-xr-x 4 facingissuesonit Saurabh   4096 Apr 20 11:27 vendor

Before starting Logstash, we need to create a configuration file that takes input data from Kafka, parses it into the respective fields and sends it to Elasticsearch. Create the file logstash-app1.conf in the Logstash bin directory with the below content.

bin/logstash-app1.conf


input {
     kafka {
            bootstrap_servers => 'localhost:9092'
            topics => ["APP-1-TOPIC"]
            codec => json {}
          }
}
filter
{
#parse the log line
      grok
    {
    match => {"message" => "\A%{TIMESTAMP_ISO8601:timestamp}\s+%{LOGLEVEL:loglevel}\s+(?<logger>(?:[a-zA-Z0-9-]+\.)*[A-Za-z0-9$]+)\s+(-\s+)?(?=(?<msgnr>[A-Z]+[0-9]{4,5}))*%{DATA:message}({({[^}]+},?\s*)*})?\s*$(?<stacktrace>(?m:.*))?" }
    }

    #Remove unused fields
    #mutate { remove_field =>["beat","@version" ]}
}
output {
    #Output result sent to elasticsearch and dynamically create array
    elasticsearch {
        index  => "app1-logs-%{+YYYY.MM.dd}"
        hosts => ["localhost:9200"]
        sniffing => false
    }

     #Sysout logs
     stdout
       {
         codec => rubydebug
       }
}

To test your configuration file you can use below command.


./logstash -t -f logstash-app1.conf

If the above command reports that the configuration is OK, run the below command to start reading and parsing data from the Kafka topic.


./logstash -f logstash-app1.conf

To design your own grok pattern for your log-line format, you can use the below links, which help you build patterns incrementally and also provide grok for some sample logs.

http://grokdebug.herokuapp.com and http://grokconstructor.appspot.com/
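If you prefer to experiment locally before touching the Kafka pipeline, a throwaway Logstash config with a stdin input lets you paste sample lines and watch the parsed output (a minimal sketch with a simplified pattern, not the full pattern used above):

input { stdin {} }
filter {
  grok { match => { "message" => "\A%{TIMESTAMP_ISO8601:timestamp}\s+%{LOGLEVEL:loglevel}\s+%{GREEDYDATA:message}" } }
}
output { stdout { codec => rubydebug } }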

The Logstash console will show the parsed data as below; you can drop unused fields before storing in Elasticsearch by uncommenting the mutate section in the configuration file.


{
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 194,
      "loglevel" => "WARN",
        "logger" => "CreateSomethingActivationKey",
          "beat" => {
        "hostname" => "zlp02870",
            "name" => "zlp02870",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",
        "source" => "/opt/app/facingissuesonit/App1.log",
       "message" => [
        [0] "2013-02-28 09:57:56,662 WARN  CreateSomethingActivationKey - WhateverException for User 49-123-345678 {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
        [1] "WhateverException for User 49-123-345678 "
    ],
          "type" => "log",
     "timestamp" => "2013-02-28 09:57:56,662"
}
{
         "msgnr" => "ERR1700",
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 375,
      "loglevel" => "INFO",
        "logger" => "LMLogger",
          "beat" => {
        "hostname" => "zlp02870",
            "name" => "zlp02870",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",
        "source" => "/opt/app/facingissuesonit/App1.log",
       "message" => [
        [0] "2013-02-28 09:57:56,663 INFO  LMLogger - ERR1700 - u:null failures: 0  - Technical error {{rid,US8cFAp5eZgAABwUItEAAAAI_dev01_443}{realsid,60A9772A136B9912B6FF0C3627A47090.dev1-a}}",
        [1] "ERR1700 - u:null failures: 0  - Technical error "
    ],
          "type" => "log",
     "timestamp" => "2013-02-28 09:57:56,663"
}
{
        "offset" => 718,
        "logger" => "SomeCallLogger",
    "input_type" => "log",

       "message" => [
        [0] "2013-02-28 09:57:56,668 ERROR SomeCallLogger - ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  {}\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
        [1] "ESS10005 Cpc portalservices: Exception caught while writing log messege to MEA Call:  "
    ],
          "type" => "log",
         "msgnr" => "ESS10005",
    "@timestamp" => 2017-05-28T23:47:42.160Z,
    "stacktrace" => "\njava.sql.SQLSyntaxErrorException: ORA-00942: table or view does not exist\n\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:445)\n\tat oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:396)",
      "loglevel" => "ERROR",
          "beat" => {
        "hostname" => "zlp02870",
            "name" => "zlp02870",
         "version" => "5.1.2"
    },
      "@version" => "1",
     "timestamp" => "2013-02-28 09:57:56,668"
}
{
    "@timestamp" => 2017-05-28T23:47:42.160Z,
        "offset" => 903,
      "loglevel" => "INFO",
        "logger" => "EntryFilter",
          "beat" => {
        "hostname" => "zlp02870",
            "name" => "zlp02870",
         "version" => "5.1.2"
    },
    "input_type" => "log",
      "@version" => "1",

       "message" => [
        [0] "2013-02-28 10:04:35,723 INFO  EntryFilter - Fresh on request /portalservices/foobarwhatever {{rid,US8dogp5eZgAABwXPGEAAAAL_dev01_443}{realsid,56BA2AD41D9BB28AFCEEEFF927EE61C2.dev1-a}}\n",
        [1] "Fresh on request /portalservices/foobarwhatever "
    ],
          "type" => "log",
     "timestamp" => "2013-02-28 10:04:35,723"
}

To verify on the Elasticsearch end that your data arrived successfully, open the URL http://localhost:9200/_cat/indices in your browser; it will display the created index with the current date.

yellow open app1-logs-2017.05.28                             Qjs6XWiFQw2zsiVs9Ks6sw 5 1         4     0  47.3kb  47.3kb
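To look at the indexed documents themselves, a standard Elasticsearch search query works from the command line; for example, pulling only ERROR events (the query string here is just an example):

curl "http://localhost:9200/app1-logs-*/_search?q=loglevel:ERROR&pretty"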

Kibana Installation, Configuration and Start

Download the latest version of Kibana from the link below; on a Linux server use the command shown to untar it, or on Windows just unzip the downloaded file.

Download Link : https://www.elastic.co/downloads/kibana

tar -zxvf kibana-5.4.0.tar.gz

It will show the below files and directory structure for Kibana.

ls -l
drwxr-xr-x   2 facingissuesonit Saurabh   4096 May 22 14:23 bin
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 25 18:58 config
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 25 11:54 data
-rw-r--r--   1 facingissuesonit Saurabh    562 Apr 17 12:04 LICENSE.txt
drwxr-xr-x   6 facingissuesonit Saurabh   4096 Apr 17 12:04 node
drwxr-xr-x 485 facingissuesonit Saurabh  20480 Apr 17 12:04 node_modules
-rw-r--r--   1 facingissuesonit Saurabh 660429 Apr 17 12:04 NOTICE.txt
drwxr-xr-x   3 facingissuesonit Saurabh   4096 Apr 17 12:04 optimize
-rw-r--r--   1 facingissuesonit Saurabh    702 Apr 17 12:04 package.json
drwxr-xr-x   2 facingissuesonit Saurabh   4096 May 22 12:29 plugins
-rw-r--r--   1 facingissuesonit Saurabh   4909 Apr 17 12:04 README.txt
drwxr-xr-x  10 facingissuesonit Saurabh   4096 Apr 17 12:04 src
drwxr-xr-x   3 facingissuesonit Saurabh   4096 Apr 17 12:04 ui_framework
drwxr-xr-x   2 facingissuesonit Saurabh   4096 Apr 17 12:04 webpackShims

Before starting Kibana, make some basic changes in the config/kibana.yml file: uncomment the below properties and set them as shown.

server.port: 5601
server.host: localhost
elasticsearch.url: "http://localhost:9200"

Now the Kibana configuration is ready and it is time to start it. We can use the below command, run from the Kibana home directory, to start Kibana in the background.

screen -d -m bin/kibana

Kibana takes some time to start; we can test it by opening the below URL in a browser.

http://localhost:5601/

To check this data in Kibana, open the above URL in a browser, go to the Management tab in the left-side menu -> Index Patterns -> click Add New.

Enter the index name or pattern and the time field name as in the below screen, then click the Create button.

Index Pattern Settings

Now go to the Discover tab and select the index pattern app1-log*; it will display the data as below.

kibana discover data

Now make the below changes according to your application's specifics.

Filebeat :

  • Update the prospector path to your application's current log file.
  • Move Kafka to a different machine, because Kafka will be the single location that receives shipped data from the different servers. Replace localhost with the Kafka server's IP in the hosts property of the Kafka output section of filebeat.full.yml.
  • Copy the same Filebeat setup to all servers where your application is deployed and logs need to be read.
  • Start all Filebeat instances on each server.

Elasticsearch :

  • Uncomment the network.host property in elasticsearch.yml so Elasticsearch is accessible by IP address.

Logstash:

  • Update localhost in the input section of logstash-app1.conf with the Kafka machine's IP.
  • Change the grok pattern in the filter section according to your log format. The below URLs help with incremental design: http://grokdebug.herokuapp.com and http://grokconstructor.appspot.com/
  • Update localhost in the Elasticsearch output section with the IP if Elasticsearch moves to a different machine.

Kibana:

  • Update localhost in the elasticsearch.url property of kibana.yml with the IP if Kibana is on a different machine.

Know More

To know more about Elasticsearch, Logstash, Kibana and Kafka configuration and related issues, follow the below links:

Elasticsearch Interview Questions and Answers

How to retrieve key Information from JSON Data?

Jackson provides APIs to retrieve information based on a key name. It works like a DOM parser, building a tree of the properties in the JSON; based on a path, Jackson retrieves the information for the requested keys.

Pre-Requisite 

Add jackson-databind-2.8.5.jar to your classpath or make the below dependency entry in your pom.xml file.

  <dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
  </dependency>

Example

In the below example we have some sample JSON data for a student; using the Jackson API we will retrieve the values for keys like rollNumber and phoneNumbers.

Sample DATA

{
  "rollNumber" : 11,
  "firstName" : "Saurabh",
  "lastName" : "Gupta",
  "permanent" : false,
  "address" : {
    "addressLine" : "Lake Union Hill Way",
    "city" : "Atlanta",
    "zipCode" : 50005
  },
  "phoneNumbers" : [ 2233445566, 3344556677 ],
  "cities" : [ "Dallas", "San Antonio", "Irving" ],
  "properties" : {
    "play" : "Badminton",
    "interst" : "Math",
    "age" : "34 years"
  }
}

Sample Code

package test.facingissesonit.json.jacson;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Iterator;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadJsonByKeyName {

	public static void main(String[] args) {
		try
		{
		byte[] jsonData = Files.readAllBytes(Paths.get("student_data2.txt"));

		ObjectMapper objectMapper = new ObjectMapper();

		// Jackson reads JSON like a DOM parser and creates a tree of properties
		JsonNode rootNode = objectMapper.readTree(jsonData);

		JsonNode idNode = rootNode.path("rollNumber");
		System.out.println("rollNumber = " + idNode.asInt());

		JsonNode phoneNosNode = rootNode.path("phoneNumbers");
		Iterator<JsonNode> elements = phoneNosNode.elements();
		while (elements.hasNext()) {
			JsonNode phone = elements.next();
			System.out.println("Phone No = " + phone.asLong());
		}
		}
		catch(JsonProcessingException ex)
		{
			ex.printStackTrace();
		}
		catch(IOException ex)
		{
			ex.printStackTrace();
		}
	}
}

Output

rollNumber = 11
Phone No = 2233445566
Phone No = 3344556677
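For nested keys you do not have to chain path() calls; JsonNode also supports JSON Pointer expressions through at() (a small sketch against the same sample data; the pointer strings are based on that data):

// "/address/city" walks into the nested address object
JsonNode cityNode = rootNode.at("/address/city");
System.out.println("city = " + cityNode.asText());

// "/phoneNumbers/0" picks the first element of the array
JsonNode firstPhone = rootNode.at("/phoneNumbers/0");
System.out.println("first phone = " + firstPhone.asLong());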


How to Map JSON to Java Object by Annotation?

Jackson provides annotations for mapping Java and JSON properties; these help when fields in the JSON and the Java object have different names, or when some properties are additional and have no match.

Pre-Requisite 

Add jackson-databind-2.8.5.jar to your classpath or make the below dependency entry in your pom.xml file.

  <dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
  </dependency>

Example

The below example shows mapping a Student object from JSON to Java where the field is named id in the JSON but rollNumber in the Java object, and the Java object also has an additional field that is not mapped to anything. For the latter we use @JsonIgnoreProperties(ignoreUnknown = true) at the class level, so that this field, and any others like it, are ignored.

Annotation Used

Ignore unknown properties:
@JsonIgnoreProperties(ignoreUnknown = true)

Match a JSON property to a differently named Java property:
@JsonProperty(value = "XYZ")
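For instance, a hypothetical two-field class combining both annotations (just an illustration; the full Student class below does the same):

@JsonIgnoreProperties(ignoreUnknown = true)
public class Employee {
	// populated from the JSON field "id"; unmatched JSON fields are ignored
	@JsonProperty(value = "id")
	private int employeeId;
}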

JSON Sample Data

Add below JSON data in student_data.txt file to execute this program.

[
{
  "id" : 11,
  "firstName" : "Saurabh",
  "lastName" : "Gupta",
  "permanent" : false,
  "address" : {
    "addressLine" : "Lake Union Hill Way",
    "city" : "Atlanta",
    "state" : "GA",
    "zipCode" : 50005
  },
  "phoneNumbers" : [ 2233445566, 3344556677 ],
  "cities" : [ "Dallas", "San Antonio", "Irving" ],
  "properties" : {
    "play" : "Badminton",
    "interst" : "Math",
    "age" : "34 years"
  }
},
{
  "id" : 12,
  "firstName" : "Gaurav",
  "lastName" : "Khaira",
  "permanent" : true,
  "address" : {
    "addressLine" : " 5770 Shiloh woods dr",
    "city" : "Cumming",
    "state" : "GA",
    "zipCode" : 50007
  },
  "phoneNumbers" : [ 2233445567, 3344556678 ],
  "cities" : [ "New York", "Austin", "Plano" ],
  "properties" : {
    "play" : "Baseball",
    "interst" : "Science",
    "age" : "36 years"
  }
}
]

Example

package test.facingissesonit.json.jacson;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConvertJsonToArrayList {

	public static void main(String[] args) {
		try
		{
		byte[] mapData = Files.readAllBytes(Paths.get("student_data.txt"));
		Student[] studentArr = null;

		ObjectMapper objectMapper = new ObjectMapper();
		studentArr = objectMapper.readValue(mapData, Student[].class);
		List<Student> studentList = Arrays.asList(studentArr);
		System.out.println("Student 1 \n" + studentList.get(0));
		System.out.println("Student 2 \n" + studentList.get(1));

		}
		catch(JsonMappingException ex)
		{
			ex.printStackTrace();
		}
		catch(IOException ex)
		{
			ex.printStackTrace();
		}
	}
}

Model Class

The below Student and Address classes are required to execute this program. Here the field rollNumber maps to the JSON property id (the other fields map likewise), and @JsonIgnoreProperties(ignoreUnknown = true) ignores additional fields that are not mapped.

package test.facingissesonit.json.jacson;

import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Student {
	@JsonProperty(value = "id")
	private int rollNumber;
	@JsonProperty(value = "firstName")
	private String firstName;
	@JsonProperty(value = "lastName")
	private String lastName;
	@JsonProperty(value = "permanent")
	private boolean permanent;
	@JsonProperty(value = "address")
	private Address address;
	@JsonProperty(value = "phoneNumbers")
	private long[] phoneNumbers;
	@JsonProperty(value = "cities")
	private List<String> cities;
	@JsonProperty(value = "properties")
	private Map<String, String> properties;
	private Date dateOfJoining =new Date();
	@Override
	public String toString()
	{
		StringBuffer sb=new StringBuffer();
		sb.append("==============Student Information================\n");
		sb.append("rollNumber=").append(rollNumber).append("\n");
		sb.append("firstName=").append(firstName).append("\n");
		sb.append("lastName=").append(lastName).append("\n");
		sb.append("permanent=").append(permanent).append("\n");
		sb.append("adress=").append(address).append("\n");
		sb.append("phoneNumbers=").append(Arrays.toString(phoneNumbers)).append("\n");
		sb.append("cities=").append(Arrays.toString(cities.toArray(new String[cities.size()]))).append("\n");
		sb.append("properties=").append(properties).append("\n");
		sb.append("dateOfJoining=").append(dateOfJoining).append("\n");
		return sb.toString();
	}
	public int getRollNumber() {
		return rollNumber;
	}
	public void setRollNumber(int rollNumber) {
		this.rollNumber = rollNumber;
	}
	public String getFirstName() {
		return firstName;
	}
	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}
	public String getLastName() {
		return lastName;
	}
	public void setLastName(String lastName) {
		this.lastName = lastName;
	}
	public boolean isPermanent() {
		return permanent;
	}
	public void setPermanent(boolean permanent) {
		this.permanent = permanent;
	}
	public Address getAddress() {
		return address;
	}
	public void setAddress(Address address) {
		this.address = address;
	}
	public long[] getPhoneNumbers() {
		return phoneNumbers;
	}
	public void setPhoneNumbers(long[] phoneNumbers) {
		this.phoneNumbers = phoneNumbers;
	}
	public List<String> getCities() {
		return cities;
	}
	public void setCities(List<String> cities) {
		this.cities = cities;
	}
	public Map<String, String> getProperties() {
		return properties;
	}
	public void setProperties(Map<String, String> properties) {
		this.properties = properties;
	}
	public Date getDateOfJoining() {
		return dateOfJoining;
	}
	public void setDateOfJoining(Date dateOfJoining) {
		this.dateOfJoining = dateOfJoining;
	}
}

package test.facingissesonit.json.jacson;

import com.fasterxml.jackson.annotation.JsonProperty;

public class Address {
@JsonProperty(value = "addressLine")
private String addressLine;
@JsonProperty(value = "city")
private String city;
@JsonProperty(value = "state")
private String state;
@JsonProperty(value = "zipCode")
private int zipCode;
@Override
public String toString()
{
	StringBuffer sb=new StringBuffer();
	sb.append("AddressLine=").append(addressLine).append("\n");
	sb.append("city=").append(city).append("\n");
	sb.append("state=").append(state).append("\n");
	sb.append("zipCode=").append(zipCode).append("\n");
	return sb.toString();
}
public String getAddressLine() {
	return addressLine;
}
public void setAddressLine(String addressLine) {
	this.addressLine = addressLine;
}
public String getCity() {
	return city;
}
public void setCity(String city) {
	this.city = city;
}
public String getState() {
	return state;
}
public void setState(String state) {
	this.state = state;
}
public int getZipCode() {
	return zipCode;
}
public void setZipCode(int zipCode) {
	this.zipCode = zipCode;
}
}

Output

Student 1
==============Student Information================
rollNumber=11
firstName=Saurabh
lastName=Gupta
permanent=false
adress=AddressLine=Lake Union Hill Way
city=Atlanta
state=GA
zipCode=50005

phoneNumbers=[2233445566, 3344556677]
cities=[Dallas, San Antonio, Irving]
properties={play=Badminton, interst=Math, age=34 years}
dateOfJoining=Sat May 20 22:41:52 EDT 2017

Student 2
==============Student Information================
rollNumber=12
firstName=Gaurav
lastName=Khaira
permanent=true
adress=AddressLine= 5770 Shiloh woods dr
city=Cumming
state=GA
zipCode=50007

phoneNumbers=[2233445567, 3344556678]
cities=[New York, Austin, Plano]
properties={play=Baseball, interst=Science, age=36 years}
dateOfJoining=Sat May 20 22:41:52 EDT 2017


How to create dynamic JSON by Java?

Jackson provides Java APIs to create JSON at runtime. These APIs can handle different types of data and objects.

Jackson APIs

Writing Root Object
JsonGenerator.writeStartObject();
JsonGenerator.writeEndObject();

Writing Sub Object
JsonGenerator.writeObjectFieldStart();
JsonGenerator.writeEndObject();

Writing Array
JsonGenerator.writeArrayFieldStart();
JsonGenerator.writeEndArray();

Writing Field Level
JsonGenerator.writeNumberField();
JsonGenerator.writeStringField();
JsonGenerator.writeBooleanField();
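As a minimal end-to-end illustration of these calls (a sketch writing to a StringWriter rather than a file; not part of the original example):

import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class TinyGeneratorDemo {
	public static void main(String[] args) throws Exception {
		StringWriter out = new StringWriter();
		JsonGenerator gen = new JsonFactory().createGenerator(out);
		gen.writeStartObject();                // root object
		gen.writeStringField("name", "demo"); // simple field
		gen.writeArrayFieldStart("values");   // array field
		gen.writeNumber(1);
		gen.writeNumber(2);
		gen.writeEndArray();                   // close array
		gen.writeEndObject();                  // close root
		gen.close();
		System.out.println(out); // {"name":"demo","values":[1,2]}
	}
}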

Pre-Requisite 

Add jackson-databind-2.8.5.jar to your classpath or make the below dependency entry in your pom.xml file.

  <dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
  </dependency>

Example

In the below example, the Jackson APIs generate JSON at runtime and write it to a file.

package test.facingissesonit.json.jacson;

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;

public class JsonStreamWriteToFile {

	public static void main(String[] args) {
		Student student = sampleStudentObject();
		try {
			JsonGenerator jsonGenerator = new JsonFactory().createGenerator(new FileOutputStream("student_data.txt"));
			// for pretty formatted printing
			jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
			// start root from here
			jsonGenerator.writeStartObject();

			jsonGenerator.writeNumberField("rollNumber", student.getRollNumber());
			jsonGenerator.writeStringField("firstName", student.getFirstName());
			jsonGenerator.writeStringField("lastName", student.getLastName());
			jsonGenerator.writeBooleanField("permanent", student.isPermanent());

			jsonGenerator.writeObjectFieldStart("address"); // object writing
			jsonGenerator.writeStringField("addressLine", student.getAddress().getAddressLine());
			jsonGenerator.writeStringField("city", student.getAddress().getCity());
			jsonGenerator.writeNumberField("zipCode", student.getAddress().getZipCode());
			jsonGenerator.writeEndObject(); // address object completed

			jsonGenerator.writeArrayFieldStart("phoneNumbers");
			for (long num : student.getPhoneNumbers())
				jsonGenerator.writeNumber(num);
			jsonGenerator.writeEndArray();

			// start array writing for cities
			jsonGenerator.writeArrayFieldStart("cities");
			for (String city : student.getCities())
				jsonGenerator.writeString(city);
			// closing cities array
			jsonGenerator.writeEndArray();

			jsonGenerator.writeObjectFieldStart("properties");
			Set<String> keySet = student.getProperties().keySet();
			for (String key : keySet) {
				String value = student.getProperties().get(key);
				jsonGenerator.writeStringField(key, value);
			}
			// End of  properties writing
			jsonGenerator.writeEndObject();
			//End root object writing
			jsonGenerator.writeEndObject(); 

			jsonGenerator.flush();
			jsonGenerator.close();

		} catch (IOException ex) {
			ex.printStackTrace();
		}

	}

	public static Student sampleStudentObject() {

		Student student = new Student();
		student.setRollNumber(11);
		student.setFirstName("Saurabh");
		student.setLastName("Gupta");
		student.setPhoneNumbers(new long[] { 2233445566L, 3344556677L });

		Address add = new Address();
		add.setAddressLine("Lake Union Hill Way");
		add.setCity("Atlanta");
		add.setState("GA");
		add.setZipCode(50005);
		student.setAddress(add);

		List<String> cities = new ArrayList<String>();
		cities.add("Dallas");
		cities.add("San Antonio");
		cities.add("Irving");
		student.setCities(cities);

		Map<String, String> props = new HashMap<String, String>();
		props.put("age", "34 years");
		props.put("interst", "Math");
		props.put("play", "Badminton");

		student.setProperties(props);

		return student;
	}
}

Model Object

The below Student and Address classes are required to execute this code.

package test.facingissesonit.json.jacson;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class Student {
	private int rollNumber;
	private String firstName;
	private String lastName;
	private boolean permanent;
	private Address address;
	private long[] phoneNumbers;
	private List<String> cities;
	private Map<String, String> properties;
	@Override
	public String toString()
	{
		StringBuffer sb=new StringBuffer();
		sb.append("==============Student Information================\n");
		sb.append("rollNumber=").append(rollNumber).append("\n");
		sb.append("firstName=").append(firstName).append("\n");
		sb.append("lastName=").append(lastName).append("\n");
		sb.append("permanent=").append(permanent).append("\n");
		sb.append("adress=").append(address).append("\n");
		sb.append("phoneNumbers=").append(Arrays.toString(phoneNumbers)).append("\n");
		sb.append("cities=").append(Arrays.toString(cities.toArray(new String[cities.size()]))).append("\n");
		sb.append("properties=").append(properties).append("\n");
		return sb.toString();
	}
	public int getRollNumber() {
		return rollNumber;
	}
	public void setRollNumber(int rollNumber) {
		this.rollNumber = rollNumber;
	}
	public String getFirstName() {
		return firstName;
	}
	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}
	public String getLastName() {
		return lastName;
	}
	public void setLastName(String lastName) {
		this.lastName = lastName;
	}
	public boolean isPermanent() {
		return permanent;
	}
	public void setPermanent(boolean permanent) {
		this.permanent = permanent;
	}
	public Address getAddress() {
		return address;
	}
	public void setAddress(Address address) {
		this.address = address;
	}
	public long[] getPhoneNumbers() {
		return phoneNumbers;
	}
	public void setPhoneNumbers(long[] phoneNumbers) {
		this.phoneNumbers = phoneNumbers;
	}
	public List<String> getCities() {
		return cities;
	}
	public void setCities(List<String> cities) {
		this.cities = cities;
	}
	public Map<String, String> getProperties() {
		return properties;
	}
	public void setProperties(Map<String, String> properties) {
		this.properties = properties;
	}

}

package test.facingissesonit.json.jacson;

public class Address {
private String addressLine;
private String city;
private String state;
private int zipCode;
@Override
public String toString()
{
	StringBuffer sb=new StringBuffer();
	sb.append("AddressLine=").append(addressLine).append("\n");
	sb.append("city=").append(city).append("\n");
	sb.append("state=").append(state).append("\n");
	sb.append("zipCode=").append(zipCode).append("\n");
	return sb.toString();
}
public String getAddressLine() {
	return addressLine;
}
public void setAddressLine(String addressLine) {
	this.addressLine = addressLine;
}
public String getCity() {
	return city;
}
public void setCity(String city) {
	this.city = city;
}
public String getState() {
	return state;
}
public void setState(String state) {
	this.state = state;
}
public int getZipCode() {
	return zipCode;
}
public void setZipCode(int zipCode) {
	this.zipCode = zipCode;
}
}

Output

Generated output in the student_data.txt file:

{
  "rollNumber" : 11,
  "firstName" : "Saurabh",
  "lastName" : "Gupta",
  "permanent" : false,
  "address" : {
    "addressLine" : "Lake Union Hill Way",
    "city" : "Atlanta",
    "zipCode" : 50005
  },
  "phoneNumbers" : [ 2233445566, 3344556677 ],
  "cities" : [ "Dallas", "San Antonio", "Irving" ],
  "properties" : {
    "play" : "Badminton",
    "interst" : "Math",
    "age" : "34 years"
  }
}


How to Convert JSON data from file to ArrayList?

Jackson provides APIs to convert JSON data into collection objects like Map, ArrayList, etc.

In the below example, the file student_data.txt holds a collection of student data in JSON form; using the Jackson APIs we will convert this JSON data into an ArrayList of Java objects.

Pre-Requisite 

Add jackson-databind-2.8.5.jar to your classpath or make the below dependency entry in your pom.xml file.

  <dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
  </dependency>

Sample Data

Add the below content to student_data.txt.

[
{
  "rollNumber" : 11,
  "firstName" : "Saurabh",
  "lastName" : "Gupta",
  "permanent" : false,
  "address" : {
    "addressLine" : "Lake Union Hill Way",
    "city" : "Atlanta",
    "zipCode" : 50005
  },
  "phoneNumbers" : [ 2233445566, 3344556677 ],
  "cities" : [ "Dallas", "San Antonio", "Irving" ],
  "properties" : {
    "play" : "Badminton",
    "interst" : "Math",
    "age" : "34 years"
  }
},
{
  "rollNumber" : 11,
  "firstName" : "Gaurav",
  "lastName" : "Khaira",
  "permanent" : true,
  "address" : {
    "addressLine" : " 5770 Shiloh woods dr",
    "city" : "Cumming",
    "zipCode" : 50007
  },
  "phoneNumbers" : [ 2233445567, 3344556678 ],
  "cities" : [ "New York", "Austin", "Plano" ],
  "properties" : {
    "play" : "Baseball",
    "interst" : "Science",
    "age" : "36 years"
  }
}
]

 

Example 

package test.facingissesonit.json.jacson;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConvertJsonToArrayList {

	public static void main(String[] args) {
		try
		{
		byte[] mapData = Files.readAllBytes(Paths.get("student_data.txt"));
		Student[] studentArr = null;

		ObjectMapper objectMapper = new ObjectMapper();
		studentArr = objectMapper.readValue(mapData, Student[].class);
		List<Student> studentList = Arrays.asList(studentArr);
		System.out.println("Student 1 \n" + studentList.get(0));
		System.out.println("Student 2 \n" + studentList.get(1));

		}
		catch(JsonMappingException ex)
		{
			ex.printStackTrace();
		}
		catch(IOException ex)
		{
			ex.printStackTrace();
		}
	}
}
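The example above deserializes into a Student[] and wraps it with Arrays.asList. To get a genuine ArrayList directly, Jackson's TypeReference can be used instead (a small sketch; only the readValue call changes):

import com.fasterxml.jackson.core.type.TypeReference;

// readValue with a TypeReference preserves the generic List<Student> type
List<Student> studentList = objectMapper.readValue(mapData, new TypeReference<List<Student>>() {});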

 

Model Classes

The below Student and Address classes are required to execute this code.

package test.facingissesonit.json.jacson;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class Student {
	private int rollNumber;
	private String firstName;
	private String lastName;
	private boolean permanent;
	private Address address;
	private long[] phoneNumbers;
	private List<String> cities;
	private Map<String, String> properties;
	@Override
	public String toString()
	{
		StringBuffer sb = new StringBuffer();
		sb.append("==============Student Information================\n");
		sb.append("rollNumber=").append(rollNumber).append("\n");
		sb.append("firstName=").append(firstName).append("\n");
		sb.append("lastName=").append(lastName).append("\n");
		sb.append("permanent=").append(permanent).append("\n");
		sb.append("address=").append(address).append("\n");
		sb.append("phoneNumbers=").append(Arrays.toString(phoneNumbers)).append("\n");
		sb.append("cities=").append(Arrays.toString(cities.toArray(new String[cities.size()]))).append("\n");
		sb.append("properties=").append(properties).append("\n");
		return sb.toString();
	}
	public int getRollNumber() {
		return rollNumber;
	}
	public void setRollNumber(int rollNumber) {
		this.rollNumber = rollNumber;
	}
	public String getFirstName() {
		return firstName;
	}
	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}
	public String getLastName() {
		return lastName;
	}
	public void setLastName(String lastName) {
		this.lastName = lastName;
	}
	public boolean isPermanent() {
		return permanent;
	}
	public void setPermanent(boolean permanent) {
		this.permanent = permanent;
	}
	public Address getAddress() {
		return address;
	}
	public void setAddress(Address address) {
		this.address = address;
	}
	public long[] getPhoneNumbers() {
		return phoneNumbers;
	}
	public void setPhoneNumbers(long[] phoneNumbers) {
		this.phoneNumbers = phoneNumbers;
	}
	public List<String> getCities() {
		return cities;
	}
	public void setCities(List<String> cities) {
		this.cities = cities;
	}
	public Map<String, String> getProperties() {
		return properties;
	}
	public void setProperties(Map<String, String> properties) {
		this.properties = properties;
	}
	}

}

package test.facingissesonit.json.jacson;

public class Address {
private String addressLine;
private String city;
private String state;
private int zipCode;
@Override
public String toString()
{
	StringBuffer sb = new StringBuffer();
	sb.append("AddressLine=").append(addressLine).append("\n");
	sb.append("city=").append(city).append("\n");
	sb.append("state=").append(state).append("\n");
	sb.append("zipCode=").append(zipCode).append("\n");
	return sb.toString();
}
public String getAddressLine() {
	return addressLine;
}
public void setAddressLine(String addressLine) {
	this.addressLine = addressLine;
}
public String getCity() {
	return city;
}
public void setCity(String city) {
	this.city = city;
}
public String getState() {
	return state;
}
public void setState(String state) {
	this.state = state;
}
public int getZipCode() {
	return zipCode;
}
public void setZipCode(int zipCode) {
	this.zipCode = zipCode;
}
}

Output

Below is the console output generated by the program from the mapped Java objects.

Student 1
==============Student Information================
rollNumber=11
firstName=Saurabh
lastName=Gupta
permanent=false
address=AddressLine=Lake Union Hill Way
city=Atlanta
state=null
zipCode=50005

phoneNumbers=[2233445566, 3344556677]
cities=[Dallas, San Antonio, Irving]
properties={play=Badminton, interest=Math, age=34 years}

Student 2
==============Student Information================
rollNumber=11
firstName=Gaurav
lastName=Khaira
permanent=true
address=AddressLine= 5770 Shiloh woods dr
city=Cumming
state=null
zipCode=50007

phoneNumbers=[2233445567, 3344556678]
cities=[New York, Austin, Plano]
properties={play=Baseball, interest=Science, age=36 years}


How to Convert JSON to Java Object and Java Object to JSON?

The Java code below covers both cases, JSON to Java object and Java object to JSON, using the Jackson ObjectMapper API.

A generic method is given for each case: convertJavaObjectToJSON and convertJSONToJavaObject.

Pre-Requisite: Add jackson-databind-2.8.5.jar to your classpath, or make the dependency entry below in your pom.xml file.

  <dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
  </dependency>
 

JAVA and JSON Conversion Example:

Below is a complete example covering both conversions:

ObjectMapper.readValue() : converts a JSON string to a Java object

ObjectMapper.writeValue() : converts a Java object to JSON
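As a compact sketch of the same round trip (assuming the Student model class from the previous example; the class name RoundTripSketch is only for illustration), the two calls can also be written with writeValueAsString, the string-returning convenience over writeValue:

import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTripSketch {
	public static void main(String[] args) throws Exception {
		Student student = new Student();
		student.setFirstName("Saurabh");

		ObjectMapper mapper = new ObjectMapper();
		// Java object -> pretty-printed JSON string
		String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(student);
		// JSON string -> Java object
		Student parsed = mapper.readValue(json, Student.class);
		System.out.println(parsed.getFirstName()); // prints Saurabh
	}
}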

package test.facingissesonit.json.jacson;

import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

public class JsonAndJavaObjectConversion {

	public static void main(String[] args) {
		System.out.println("=================Convert Student Java Object to JSON==============");
		// convert Object to json string
		Student student = sampleStudentObject();

		String studentStr = convertJavaObjectToJSON(student);
		System.out.println("Student JSON Data \n" + studentStr);
		System.out.println("=================Convert JSON DATA  to Student JAVA Object==============");
		Object object = convertJSONToJavaObject(studentStr, Student.class);
		if (object instanceof Student) {
			System.out.println("Student Object\n" + (Student) object);
		}

	}
	//Generic Method to convert JSON object to Java Object
	public static Object convertJSONToJavaObject(String strJsonData, Class<?> className) {
		try {
			// ObjectMapper new instance
			ObjectMapper objectMapper = new ObjectMapper();
			// //convert json data from file text to object
			return objectMapper.readValue(strJsonData, className);
		} catch (JsonMappingException ex) {
			ex.printStackTrace();
		} catch (JsonGenerationException ex) {
			ex.printStackTrace();
		} catch (IOException ex) {
			ex.printStackTrace();
		}
		return null;
	}
    //Generic Method to convert Java object to JSON
	public static String convertJavaObjectToJSON(Object javaObject) {
		StringWriter jsonStr = new StringWriter();
		try {

			// ObjectMapper new instance
			ObjectMapper objectMapper = new ObjectMapper();
			// configure Object mapper for pretty print
			objectMapper.configure(SerializationFeature.INDENT_OUTPUT, true);

			// write the JSON to the StringWriter (could be any Writer, such as a file)
			objectMapper.writeValue(jsonStr, javaObject);

		} catch (JsonMappingException ex) {
			ex.printStackTrace();
		} catch (JsonGenerationException ex) {
			ex.printStackTrace();
		} catch (IOException ex) {
			ex.printStackTrace();
		}
		return jsonStr.toString();
	}

	public static Student sampleStudentObject() {

		Student student = new Student();
		student.setRollNumber(11);
		student.setFirstName("Saurabh");
		student.setLastName("Gupta");
		student.setPhoneNumbers(new long[] { 2233445566L, 3344556677L });

		Address add = new Address();
		add.setAddressLine("Lake Union Hill Way");
		add.setCity("Atlanta");
		add.setState("GA");
		add.setZipCode(50005);
		student.setAddress(add);

		List<String> cities = new ArrayList<String>();
		cities.add("Dallas");
		cities.add("San Antonio");
		cities.add("Irving");
		student.setCities(cities);

		Map<String, String> props = new HashMap<String, String>();
		props.put("age", "34 years");
		props.put("interest", "Math");
		props.put("play", "Badminton");

		student.setProperties(props);

		return student;
	}

}

Model Classes:

The Student and Address model classes are the same as in the previous example (How to Convert JSON data from file to ArrayList?), so they are not repeated here.

Output:

=================Convert Student Java Object to JSON==============
Student JSON Data
{
  "rollNumber" : 11,
  "firstName" : "Saurabh",
  "lastName" : "Gupta",
  "permanent" : false,
  "address" : {
    "addressLine" : "Lake Union Hill Way",
    "city" : "Atlanta",
    "state" : "GA",
    "zipCode" : 50005
  },
  "phoneNumbers" : [ 2233445566, 3344556677 ],
  "cities" : [ "Dallas", "San Antonio", "Irving" ],
  "properties" : {
    "play" : "Badminton",
    "interest" : "Math",
    "age" : "34 years"
  }
}
=================Convert JSON DATA  to Student JAVA Object==============
Student Object
==============Student Information================
rollNumber=11
firstName=Saurabh
lastName=Gupta
permanent=false
address=AddressLine=Lake Union Hill Way
city=Atlanta
state=GA
zipCode=50005

phoneNumbers=[2233445566, 3344556677]
cities=[Dallas, San Antonio, Irving]
properties={play=Badminton, interest=Math, age=34 years}


Elasticsearch REST JAVA Client to get Index Details List

Below is an example to get index details into a Java array by using the Elasticsearch REST Java client. The client calls the endpoint “/_cat/indices?format=json” to retrieve the details of all indexes. It is the same as the following GET request with cURL:

GET http://elasticsearchHost:9200/_cat/indices?format=json
 

Pre-requisite

  • Java 7 or above is required.
  • Add the dependencies below for the Elasticsearch REST client and JSON mapping to your pom.xml, or add the jars to your classpath.

Dependency

<!--Elasticsearch REST jar-->
<dependency>
	<groupId>org.elasticsearch.client</groupId>
	<artifactId>rest</artifactId>
	<version>5.1.2</version>
</dependency>
<!--Jackson jar for mapping json to Java -->
<dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.8.5</version>
</dependency>

Sample Code

import java.io.IOException;
import java.util.Collections;

import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ElasticsearchRESTIndexClient {

	public static void main(String[] args) {
		IndexInfo []indexArr = null;
		RestClient client = null;
		try {
			client = openConnection();
			if (client != null) {
				// performRequest GET method will retrieve all index detail list
				// information from elastic server
				Response response = client.performRequest("GET", "/_cat/indices?format=json",
						Collections.singletonMap("pretty", "true"));
				// GetEntity api will return content of response in form of json
				// in Http Entity
				HttpEntity entity = response.getEntity();
				ObjectMapper jacksonObjectMapper = new ObjectMapper();
				// Map json response to Java object in IndexInfo Array
				// Cluster Info
				indexArr = jacksonObjectMapper.readValue(entity.getContent(), IndexInfo[].class);
				for(IndexInfo indexInfo:indexArr)
				{
				System.out.println(indexInfo);
			    }
			}

		} catch (Exception ex) {
			System.out.println("Exception found while getting index details");
			ex.printStackTrace();
		} finally {
			closeConnnection(client);
		}

	}

	// Get Rest client connection
	private static RestClient openConnection() {
		RestClient client = null;
		try {
			final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
			credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("userid", "password"));
			client = RestClient.builder(new HttpHost("elasticHost", 9200))
					.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
						// Customize connection as per requirement
						public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
							return httpClientBuilder
									// Credentials
									.setDefaultCredentialsProvider(credentialsProvider)
									// Proxy
									.setProxy(new HttpHost("proxyServer", 8080));

						}
					}).setMaxRetryTimeoutMillis(60000).build();

		} catch (Exception ex) {
			ex.printStackTrace();
		}
		return client;
	}

	// Close Open connection
	private static void closeConnnection(RestClient client) {
		if (client != null) {
			try {
				client.close();
			} catch (IOException ex) {
				ex.printStackTrace();
			}
		}
	}

}

IndexInfo object into which the JSON index details will be mapped:

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown = true)
public class IndexInfo {
@JsonProperty(value = "health")
private String health;
@JsonProperty(value = "index")
private String indexName;
@JsonProperty(value = "status")
private String status;
@JsonProperty(value = "pri")
private int shards;
@JsonProperty(value = "rep")
private int replica;
@JsonProperty(value = "pri.store.size")
private String dataSize;
@JsonProperty(value = "store.size")
private String totalDataSize;
@JsonProperty(value = "docs.count")
private String documentCount;

@Override
public String toString()
{
	StringBuffer str = new StringBuffer(60);
	str.append("{\n");
	str.append("    \"").append("indexName").append("\":\"").append(indexName).append("\",\n");
	str.append("    \"").append("health").append("\":\"").append(health).append("\",\n");
	str.append("    \"").append("status").append("\":\"").append(status).append("\",\n");
	str.append("    \"").append("shards").append("\":\"").append(shards).append("\",\n");
	str.append("    \"").append("replica").append("\":\"").append(replica).append("\",\n");
	str.append("    \"").append("dataSize").append("\":\"").append(dataSize).append("\",\n");
	str.append("    \"").append("totalDataSize").append("\":\"").append(totalDataSize).append("\",\n");
	str.append("    \"").append("documentCount").append("\":\"").append(documentCount).append("\"\n");
	// close the JSON object
	str.append("}");
	return str.toString();
}
public String getIndexName() {
	return indexName;
}
public void setIndexName(String indexName) {
	this.indexName = indexName;
}
public int getShards() {
	return shards;
}
public void setShards(int shards) {
	this.shards = shards;
}
public int getReplica() {
	return replica;
}
public void setReplica(int replica) {
	this.replica = replica;
}
public String getDataSize() {
	return dataSize;
}
public void setDataSize(String dataSize) {
	this.dataSize = dataSize;
}
public String getTotalDataSize() {
	return totalDataSize;
}
public void setTotalDataSize(String totalDataSize) {
	this.totalDataSize = totalDataSize;
}
public String getDocumentCount() {
	return documentCount;
}
public void setDocumentCount(String documentCount) {
	this.documentCount = documentCount;
}
public String getStatus() {
	return status;
}
public void setStatus(String status) {
	this.status = status;
}
public String getHealth() {
	return health;
}
public void setHealth(String health) {
	this.health = health;
}
}
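For reference, each element of the /_cat/indices?format=json response is a JSON object whose keys match the @JsonProperty values above; any extra keys in the response are skipped because of @JsonIgnoreProperties(ignoreUnknown = true). An illustrative element (all values made up) looks like:

{
  "health" : "yellow",
  "status" : "open",
  "index" : "logstash-2017.01.01",
  "pri" : "5",
  "rep" : "1",
  "docs.count" : "12345",
  "store.size" : "1.2gb",
  "pri.store.size" : "620mb"
}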

Read More on Elasticsearch REST

Integration

Integrate Filebeat, Kafka, Logstash, Elasticsearch and Kibana