diff --git a/hazelcast-integration/README.md b/hazelcast-integration/README.md
index b24df405f..088ecde0b 100644
--- a/hazelcast-integration/README.md
+++ b/hazelcast-integration/README.md
@@ -25,3 +25,5 @@
Implementation of integration of MongoDB with hazelcast.
-
openshift
A guideline to start using Hazelcast on the Red Hat OpenShift platform.
+- hikari-connection-pool
+	A sample demo that integrates Hazelcast with the Hikari connection pool.
diff --git a/hazelcast-integration/hikari-connection-pool/README.md b/hazelcast-integration/hikari-connection-pool/README.md
new file mode 100644
index 000000000..eb3deeabd
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/README.md
@@ -0,0 +1,21 @@
+Hazelcast Integration with Hikari Connection Pool
+To create a Hikari connection pool, first map a database table to a Hazelcast in-memory map.
+In this demo, I have used EMPLOYEE table created on H2 DB, which is mapped with employee-map.
+
+- Create a Hikari data source from HikariConfig, with database credentials and connection pool configuration (e.g. pool size, connection timeout).
+- Get the connection from Hikari Data Source.
+- Implement the map store by implementing MapLoader and MapStore, and override all the necessary methods.
+- Register the MapStore into hazelcast.xml config.
+
+
+
+ ...
+
+ ...
+
+
+Database connection information can be externalized from hazelcast.xml. Build the above classes into a jar and copy it into the user-lib directory under the Hazelcast installation.
\ No newline at end of file
diff --git a/hazelcast-integration/hikari-connection-pool/pom.xml b/hazelcast-integration/hikari-connection-pool/pom.xml
new file mode 100644
index 000000000..336ad455f
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/pom.xml
@@ -0,0 +1,40 @@
+
+
+ 4.0.0
+ jar
+
+ com.hazelcast.samples
+ hazelcast-integration
+ 0.1-SNAPSHOT
+
+
+ hikari-connection-pool
+ Hazelcast Hikari Connection Pool
+ A sample demo that integrates Hazelcast with the Hikari connection pool
+
+
+ ${project.parent.parent.basedir}
+
+ 17
+ 17
+ UTF-8
+
+
+
+
+ com.hazelcast
+ hazelcast
+ ${hazelcast.version}
+ provided
+
+
+
+ com.h2database
+ h2
+ 2.2.224
+ test
+
+
+
\ No newline at end of file
diff --git a/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/JDBCBasicConnection.java b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/JDBCBasicConnection.java
new file mode 100644
index 000000000..a48da4ef1
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/JDBCBasicConnection.java
@@ -0,0 +1,15 @@
+package com.hazelcast.samples.connection;
+
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
/**
 * Baseline, non-pooled JDBC access, used to contrast with the Hikari pool in this sample.
 */
public class JDBCBasicConnection {
    /**
     * Opens a plain (un-pooled) JDBC connection to the local H2 database and
     * closes it again.
     *
     * <p>Fixed: the original opened the connection and leaked it (never closed,
     * never returned); it is now released via try-with-resources.
     *
     * @throws RuntimeException if the H2 driver is missing or the connection fails
     */
    public static void getConnection() {
        try {
            // Explicit driver registration; redundant with JDBC 4+ auto-loading,
            // but kept so the sample fails fast with a clear error if H2 is absent.
            Class.forName("org.h2.Driver");
            // try-with-resources releases the connection even on failure.
            try (java.sql.Connection connection =
                         DriverManager.getConnection("jdbc:h2:~/test", "sa", "")) {
                // Connection established — nothing further to demonstrate here.
            }
        } catch (ClassNotFoundException | SQLException e) {
            throw new RuntimeException(e);
        }
    }
}
diff --git a/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/pool/HikariDataSourcePool.java b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/pool/HikariDataSourcePool.java
new file mode 100644
index 000000000..ec64ca1ed
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/connection/pool/HikariDataSourcePool.java
@@ -0,0 +1,58 @@
+package com.hazelcast.samples.connection.pool;
+
+import com.hazelcast.shaded.com.zaxxer.hikari.HikariConfig;
+import com.hazelcast.shaded.com.zaxxer.hikari.HikariDataSource;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+/**
+ * Connection pool data source class
+ * Implement this class as thread-safe in multithreading environment
+ */
+public class HikariDataSourcePool {
+ private static HikariDataSource hikariDataSource = null;
+ private static final HikariDataSourcePool hikariDataSourcePool = null;
+
+ private HikariDataSourcePool() {
+ if (null != hikariDataSource) {
+ System.out.println("Hikari data source already created. existing connection can be used.");
+ } else {
+ createHikariDataSource();
+ }
+ }
+
+ private void createHikariDataSource() {
+ HikariConfig hikariConfig = new HikariConfig();
+
+ hikariConfig.setJdbcUrl("jdbc:h2:mem:testdb");
+ hikariConfig.setUsername("sa");
+ hikariConfig.setPassword("");
+ hikariConfig.setMaximumPoolSize(5);
+ hikariConfig.setIdleTimeout(30000);
+ hikariConfig.setConnectionTimeout(30000);
+ hikariConfig.setPoolName("Demo-POOL");
+ hikariConfig.setDriverClassName("org.h2.Driver");
+
+ hikariDataSource = new HikariDataSource(hikariConfig);
+
+ System.out.println("Datasource Created..");
+ }
+
+ /**
+ *
+ * Implementation should be thread-safe
+ */
+ public static synchronized Connection getConnection() {
+ try {
+ if (null != hikariDataSource) {
+ System.err.println("\nGetting....! SQL Connection from HIKARI POOL.\n");
+ return hikariDataSource.getConnection();
+ } else {
+ throw new RuntimeException("Ops! Hikari datasource not available.");
+ }
+ } catch (SQLException exception) {
+ throw new RuntimeException("Exception while creating database connection." + exception);
+ }
+ }
+}
diff --git a/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/map/EmployeeMapStore.java b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/map/EmployeeMapStore.java
new file mode 100644
index 000000000..bfc69c165
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/map/EmployeeMapStore.java
@@ -0,0 +1,136 @@
+package com.hazelcast.samples.map;
+
import com.hazelcast.map.MapLoader;
import com.hazelcast.map.MapStore;
import com.hazelcast.samples.model.Employee;

import java.sql.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.hazelcast.samples.connection.pool.HikariDataSourcePool.getConnection;
+
+public class EmployeeMapStore implements MapLoader, MapStore {
+
+ public EmployeeMapStore() {
+ }
+
+ @Override
+ public Iterable loadAllKeys() {
+ String query = "SELECT EMPID FROM EMPLOYEE";
+
+ List empIds = new ArrayList<>();
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(query);
+ ResultSet resultSet = preparedStatement.executeQuery()) {
+ while (resultSet.next()) {
+ empIds.add(resultSet.getInt(1));
+ }
+ } catch (SQLException exception) {
+ throw new RuntimeException("Error on load all keys : " + exception);
+ }
+
+ return empIds;
+ }
+
+ @Override
+ public Employee load(Integer empId) {
+ String query = "SELECT EMPID, NAME, SALARY FROM EMPLOYEE WHERE EMPID=?";
+ Employee employee = null;
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(query)) {
+ preparedStatement.setInt(1, empId);
+ ResultSet resultSet = preparedStatement.executeQuery();
+
+ if (resultSet.next()) {
+ employee = new Employee(resultSet.getInt(1), resultSet.getString(2), resultSet.getDouble(3));
+ }
+ } catch (SQLException exception) {
+ throw new RuntimeException("Error on load key : " + exception);
+ }
+
+ return employee;
+ }
+
+ @Override
+ public Map loadAll(Collection collection) {
+ System.out.println("Load all employee..");
+
+ List employees = (List) collection;
+
+ return employees.stream().collect(Collectors.toMap(id -> id, id -> load(id).toString()));
+ }
+
+ @Override
+ public void store(Integer integer, Employee employee) {
+ String storeQuery = "INSERT INTO EMPLOYEE(EMPID, NAME, SALARY) VALUES(?, ?, ?)";
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(storeQuery)) {
+ preparedStatement.setInt(1, employee.empId());
+ preparedStatement.setString(2, employee.name());
+ preparedStatement.setDouble(3, employee.salary());
+
+ preparedStatement.executeUpdate();
+ } catch (Exception exception) {
+ System.out.println("Exception : " + exception.getMessage());
+ throw new RuntimeException(exception.getMessage());
+ }
+ }
+
+
+ @Override
+ public void storeAll(Map map) {
+ String storeQuery = "INSERT INTO EMPLOYEE(EMPID, NAME, SALARY) VALUES(?, ?, ?)";
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(storeQuery)) {
+ map.forEach((identity, employee) -> {
+
+ try {
+ preparedStatement.setInt(1, employee.empId());
+ preparedStatement.setString(2, employee.name());
+ preparedStatement.setDouble(3, employee.salary());
+ preparedStatement.addBatch();
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ });
+
+ int[] batchResults = preparedStatement.executeBatch();
+ } catch (SQLException exception) {
+ System.out.println("Exception : " + exception.getMessage());
+ throw new RuntimeException(exception.getMessage());
+ }
+ }
+
+ @Override
+ public void delete(Integer empId) {
+ String deleteQuery = "DELETE FROM EMPLOYEE WHERE EMPID=?";
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(deleteQuery)) {
+ preparedStatement.setInt(1, empId);
+
+ preparedStatement.executeUpdate();
+ } catch (Exception exception) {
+ System.out.println("Exception : " + exception.getMessage());
+ throw new RuntimeException(exception.getMessage());
+ }
+ }
+
+ @Override
+ public void deleteAll(Collection empIds) {
+ String deleteQuery = "DELETE FROM EMPLOYEE WHERE EMPID IN (?)";
+ try (Connection connection = getConnection();
+ PreparedStatement preparedStatement = connection.prepareStatement(deleteQuery)) {
+ Array empIdsInArray = connection.createArrayOf("integer", empIds.toArray());
+ preparedStatement.setArray(1, empIdsInArray);
+
+ preparedStatement.executeUpdate();
+ } catch (Exception exception) {
+ System.out.println("Exception : " + exception.getMessage());
+ throw new RuntimeException(exception.getMessage());
+ }
+ }
+}
diff --git a/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/model/Employee.java b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/model/Employee.java
new file mode 100644
index 000000000..c62982084
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/src/main/java/com/hazelcast/samples/model/Employee.java
@@ -0,0 +1,5 @@
+package com.hazelcast.samples.model;
+
/**
 * Immutable employee row mapped to the EMPLOYEE table (EMPID, NAME, SALARY),
 * used as the value type of the Hazelcast employee map.
 *
 * @param empId  primary key (EMPID column)
 * @param name   employee name (NAME column)
 * @param salary employee salary (SALARY column)
 */
public record Employee(Integer empId, String name, Double salary) {

}
diff --git a/hazelcast-integration/hikari-connection-pool/src/main/resources/sample-hazelcast.xml b/hazelcast-integration/hikari-connection-pool/src/main/resources/sample-hazelcast.xml
new file mode 100644
index 000000000..211a9be02
--- /dev/null
+++ b/hazelcast-integration/hikari-connection-pool/src/main/resources/sample-hazelcast.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
diff --git a/hazelcast-integration/pom.xml b/hazelcast-integration/pom.xml
index cc959df27..171351cf4 100644
--- a/hazelcast-integration/pom.xml
+++ b/hazelcast-integration/pom.xml
@@ -46,6 +46,7 @@
manager-based-session-replication
openshift
eureka
+ hikari-connection-pool