IDEMPIERE-5013 Implement HikariCP as a replacement for c3p0 (#926)

* Replaced the PostgreSQL and Oracle connection pools with HikariCP, an Apache-licensed connection pool with substantially better performance and better resilience to failure (DB disconnects, etc.) than C3P0. Read more about it here: https://github.com/brettwooldridge/HikariCP

  Cleaned up the `getCachedConnection` method. With HikariCP there is no need to retry obtaining a connection: getting a connection blocks until a free connection is available or until a timeout is reached (default 30 seconds), at which point an `SQLException` is thrown. This also removed the call to `Runtime.getRuntime().runFinalization();`. HikariCP is currently configured to detect and log leaks when a connection has not been returned to the pool for longer than 5 minutes.

  Loading of pool config properties was cleaned up. Defaults are now loaded from a single file instead of coming from both a file and hard-coded properties. It is now also possible to specify any HikariCP property in the user pool property file.

  Initialization of the datasource must happen in the `getDataSource()` method because not all JDBC config is known at object construction. However, this method can (as far as I could tell) be called concurrently from multiple threads, yet had no mechanism to prevent initializing the DB pool multiple times. The variable holding the pool itself (`m_ds`) was also not marked volatile or immutable, which could lead to visibility issues. Instead of lazily initializing the pool in `getDataSource()`, the pool could probably better be initialized at object construction; I wasn't able to achieve that without breakage, therefore I made the initialization mechanism work correctly with concurrent invocations.

  Various config options, such as `MaxStatementsPerConnection`, were removed because HikariCP doesn't support them.

* (Re)added sequence time-out.
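The initialization change described above can be illustrated with a small, self-contained sketch (the class and method names here are hypothetical, not the committed iDempiere code): the first caller flips an `AtomicBoolean` and builds the pool, while concurrent callers wait on a `CountDownLatch` until the `volatile` datasource field has been published.

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import javax.sql.DataSource;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

// Hypothetical holder class, not the committed code: it only shows the guard pattern.
public class LazyPoolHolder {
    private final AtomicBoolean initializing = new AtomicBoolean(false);
    private final CountDownLatch ready = new CountDownLatch(1);
    private volatile HikariDataSource dataSource; // volatile: safe publication across threads

    public DataSource getDataSource(String jdbcUrl, String user, String password) {
        if (initializing.compareAndSet(false, true)) {
            // Only the first caller reaches this branch and configures the pool exactly once.
            HikariConfig config = new HikariConfig();
            config.setJdbcUrl(jdbcUrl);
            config.setUsername(user);
            config.setPassword(password);
            dataSource = new HikariDataSource(config);
            ready.countDown();
        } else {
            // Every concurrent caller blocks until the pool has been published.
            try {
                ready.await();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        return dataSource;
    }
}
```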
This commit is contained in:
parent 7e6ba65858
commit 861e3ad01f
@@ -0,0 +1,10 @@
+-- IDEMPIERE-5013 Implement HikariCP as a replacement for c3p0
+SELECT register_migration_script('202208072022_IDEMPIERE-5013.sql') FROM dual;
+
+SET SQLBLANKLINES ON
+SET DEFINE OFF
+
+-- Aug 7, 2022, 8:22:48 PM CEST
+INSERT INTO AD_SysConfig (AD_SysConfig_ID,AD_Client_ID,AD_Org_ID,Created,Updated,CreatedBy,UpdatedBy,IsActive,Name,Value,Description,EntityType,ConfigurationLevel,AD_SysConfig_UU) VALUES (200202,0,0,TO_TIMESTAMP('2022-08-07 20:22:48','YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP('2022-08-07 20:22:48','YYYY-MM-DD HH24:MI:SS'),100,100,'Y','MSEQUENCE_GETNEXT_TIMEOUT','30','Timeout in seconds for getting the next sequence from AD_Sequence table','D','C','77ad6242-4a80-448b-8385-42453cd831ba')
+;
@@ -0,0 +1,7 @@
+-- IDEMPIERE-5013 Implement HikariCP as a replacement for c3p0
+SELECT register_migration_script('202208072022_IDEMPIERE-5013.sql') FROM dual;
+
+-- Aug 7, 2022, 8:22:48 PM CEST
+INSERT INTO AD_SysConfig (AD_SysConfig_ID,AD_Client_ID,AD_Org_ID,Created,Updated,CreatedBy,UpdatedBy,IsActive,Name,Value,Description,EntityType,ConfigurationLevel,AD_SysConfig_UU) VALUES (200202,0,0,TO_TIMESTAMP('2022-08-07 20:22:48','YYYY-MM-DD HH24:MI:SS'),TO_TIMESTAMP('2022-08-07 20:22:48','YYYY-MM-DD HH24:MI:SS'),100,100,'Y','MSEQUENCE_GETNEXT_TIMEOUT','30','Timeout in seconds for getting the next sequence from AD_Sequence table','D','C','77ad6242-4a80-448b-8385-42453cd831ba')
+;
@@ -195,7 +195,8 @@ public class MSequence extends X_AD_Sequence
 //
 if (DB.getDatabase().isQueryTimeoutSupported())
 {
-pstmt.setQueryTimeout(QUERY_TIME_OUT);
+int timeout = MSysConfig.getIntValue(MSysConfig.MSEQUENCE_GETNEXT_TIMEOUT, QUERY_TIME_OUT, Env.getAD_Client_ID(Env.getCtx())); // default 30 seconds
+pstmt.setQueryTimeout(timeout);
 }
 rs = pstmt.executeQuery();
 if (s_log.isLoggable(Level.FINEST)) s_log.finest("AC=" + conn.getAutoCommit() + ", RO=" + conn.isReadOnly()
@@ -432,7 +433,8 @@ public class MSequence extends X_AD_Sequence
 //
 if (DB.getDatabase().isQueryTimeoutSupported())
 {
-pstmt.setQueryTimeout(QUERY_TIME_OUT);
+int timeout = MSysConfig.getIntValue(MSysConfig.MSEQUENCE_GETNEXT_TIMEOUT, QUERY_TIME_OUT, Env.getAD_Client_ID(Env.getCtx())); // default 30 seconds
+pstmt.setQueryTimeout(timeout);
 }
 rs = pstmt.executeQuery();
@@ -138,6 +138,7 @@ public class MSysConfig extends X_AD_SysConfig
 public static final String MONITOR_MAX_WAIT_FOR_CLUSTER_IN_SECONDS = "MONITOR_MAX_WAIT_FOR_CLUSTER_IN_SECONDS";
 public static final String MFG_ValidateCostsDifferenceOnCreate = "MFG_ValidateCostsDifferenceOnCreate";
 public static final String MFG_ValidateCostsOnCreate = "MFG_ValidateCostsOnCreate";
+public static final String MSEQUENCE_GETNEXT_TIMEOUT = "MSEQUENCE_GETNEXT_TIMEOUT";
 public static final String PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CHECK_ON_PAYMENT = "PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CHECK_ON_PAYMENT";
 public static final String PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CHECK_ON_RECEIPT = "PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CHECK_ON_RECEIPT";
 public static final String PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CREDIT_CARD = "PAYMENT_OVERWRITE_DOCUMENTNO_WITH_CREDIT_CARD";
@@ -222,6 +222,7 @@ public class CLogErrorBuffer extends Handler
 && !methodName.equals("dataSave")
 && loggerName.indexOf("Issue") == -1
 && loggerName.indexOf("CConnection") == -1
+&& !loggerName.startsWith("com.zaxxer.hikari")
 && DB.isConnected()
 )
 {
@@ -245,7 +246,8 @@ public class CLogErrorBuffer extends Handler
 && !methodName.equals("get_Value")
 && !methodName.equals("dataSave")
 && loggerName.indexOf("Issue") == -1
-&& loggerName.indexOf("CConnection") == -1)
+&& loggerName.indexOf("CConnection") == -1
+&& !loggerName.startsWith("com.zaxxer.hikari"))
 {
 System.err.println(getFormatter().format(record));
 }
@@ -8,9 +8,7 @@ Require-Capability: osgi.ee;filter:="(&(osgi.ee=JavaSE)(version>=11))"
 Require-Bundle: org.adempiere.base;bundle-version="0.0.0",
 org.adempiere.install;bundle-version="0.0.0";resolution:=optional
 Bundle-ClassPath: .,
-lib/c3p0-oracle-thin-extras.jar,
-lib/c3p0.jar,
-lib/mchange-commons-java.jar,
+lib/HikariCP.jar,
 lib/ojdbc10.jar
 Import-Package: org.osgi.framework,
 org.slf4j;version="1.7.2"
@@ -1,16 +0,0 @@
-#timeout
-IdleConnectionTestPeriod=1200
-AcquireRetryAttempts=2
-MaxIdleTimeExcessConnections=1200
-MaxIdleTime=1200
-#UnreturnedConnectionTimeout=1800
-
-#size
-MaxPoolSize=15
-InitialPoolSize=1
-MinPoolSize=1
-
-#flag
-TestConnectionOnCheckin=false
-TestConnectionOnCheckout=true
-#CheckoutTimeout=60;
@@ -1,18 +1,72 @@
-#timeout
-IdleConnectionTestPeriod=1200
-AcquireRetryAttempts=2
-MaxIdleTimeExcessConnections=1200
-MaxIdleTime=1200
-#UnreturnedConnectionTimeout=1800
-
-#size
-MaxPoolSize=150
-InitialPoolSize=10
-MinPoolSize=5
-MaxStatementsPerConnection=30
-
-#flag
-TestConnectionOnCheckin=false
-TestConnectionOnCheckout=true
-#CheckoutTimeout=60;
-com.mchange.v2.log.MLog=com.mchange.v2.log.slf4j.Slf4jMLog
+# !! ALL SETTINGS PRESENT IN THIS FILE WILL BE FED IN TO HIKARICP !!
+# !! DO NOT SET EMPTY VALUES !!
+#
+# You can add HikariCP settings that are not present in this file. In order to
+# use the default just remove or comment out the key all together.
+
+# This property controls the maximum number of milliseconds that a client (that's you)
+# will wait for a connection from the pool. If this time is exceeded without a
+# connection becoming available, a SQLException will be thrown. Lowest acceptable
+# connection timeout is 250 ms.
+# Default: 30000 (30 seconds)
+connectionTimeout=60000
+
+# This property controls the maximum amount of time that a connection is allowed
+# to sit idle in the pool. This setting only applies when minimumIdle is defined
+# to be less than maximumPoolSize. Idle connections will not be retired once the
+# pool reaches minimumIdle connections. Whether a connection is retired as idle
+# or not is subject to a maximum variation of +30 seconds, and average variation
+# of +15 seconds. A connection will never be retired as idle before this timeout.
+# A value of 0 means that idle connections are never removed from the pool.
+# The minimum allowed value is 10000ms (10 seconds).
+# Default: 600000 (10 minutes)
+#idleTimeout=
+
+# This property controls how frequently HikariCP will attempt to keep a connection
+# alive, in order to prevent it from being timed out by the database or network infrastructure.
+# This value must be less than the maxLifetime value. A "keepalive" will only occur on an idle
+# connection. When the time arrives for a "keepalive" against a given connection, that
+# connection will be removed from the pool, "pinged", and then returned to the pool. The
+# 'ping' is one of either: invocation of the JDBC4 isValid() method, or execution of the
+# connectionTestQuery. Typically, the duration out-of-the-pool should be measured in single
+# digit milliseconds or even sub-millisecond, and therefore should have little or no noticible
+# performance impact. The minimum allowed value is 30000ms (30 seconds), but a value in the
+# range of minutes is most desirable. Default: 0 (disabled)
+#keepaliveTime=
+
+# This property controls the minimum number of idle connections that HikariCP
+# tries to maintain in the pool. If the idle connections dip below this value
+# and total connections in the pool are less than maximumPoolSize, HikariCP
+# will make a best effort to add additional connections quickly and efficiently.
+# However, for maximum performance and responsiveness to spike demands, we
+# recommend not setting this value and instead allowing HikariCP to act as a
+# fixed size connection pool.
+# Default: same as maximumPoolSize
+#minimumIdle=
+
+# This property controls the maximum size that the pool is allowed to reach,
+# including both idle and in-use connections. Basically this value will determine
+# the maximum number of actual connections to the database backend. A reasonable
+# value for this is best determined by your execution environment. When the pool
+# reaches this size, and no idle connections are available, calls to getConnection()
+# will block for up to connectionTimeout milliseconds before timing out. Please
+# read about pool sizing: https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing
+# Default: 10
+maximumPoolSize=30
+
+# This property controls the maximum lifetime of a connection in the pool. An
+# in-use connection will never be retired, only when it is closed will it then be
+# removed. On a connection-by-connection basis, minor negative attenuation is applied
+# to avoid mass-extinction in the pool. We strongly recommend setting this value, and
+# it should be several seconds shorter than any database or infrastructure imposed
+# connection time limit. A value of 0 indicates no maximum lifetime (infinite lifetime),
+# subject of course to the idleTimeout setting. The minimum allowed value is 30000ms
+# (30 seconds).
+# Default: 1800000 (30 minutes)
+#maxLifetime=
+
+# This property controls the amount of time that a connection can be out of the
+# pool before a message is logged indicating a possible connection leak. A value of 0
+# means leak detection is disabled. Lowest acceptable value for enabling leak detection
+# is 2000 (2 seconds). Default: 0
+leakDetectionThreshold=300000
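As an aside, here is a minimal sketch of how a property file like the one above can be handed to HikariCP (the file path, JDBC URL and credentials below are placeholders, not values from this commit). Every key in the file is passed through by name to `HikariConfig`, and `getConnection()` either returns within `connectionTimeout` or throws an `SQLException`, which is why the old retry loops could be removed.

```java
import java.io.FileInputStream;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class PoolFromProperties {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try (InputStream in = new FileInputStream("hikaricp.properties")) { // example path
            props.load(in);
        }
        // Example connection settings; a real deployment supplies its own values.
        props.put("jdbcUrl", "jdbc:postgresql://localhost:5432/idempiere");
        props.put("username", "adempiere");
        props.put("password", "adempiere");

        try (HikariDataSource ds = new HikariDataSource(new HikariConfig(props))) {
            // getConnection() blocks until a connection is free or connectionTimeout elapses,
            // then throws SQLException -- no manual retry loop is needed.
            try (Connection conn = ds.getConnection()) {
                System.out.println("connected: " + conn.getMetaData().getURL());
            } catch (SQLException timeoutOrFailure) {
                timeoutOrFailure.printStackTrace();
            }
        }
    }
}
```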
@@ -4,8 +4,6 @@ bin.includes = META-INF/,\
 plugin.xml,\
 OSGI-INF/oracleprovider.xml,\
 OSGI-INF/,\
-lib/c3p0-oracle-thin-extras.jar,\
-lib/c3p0.jar,\
-lib/mchange-commons-java.jar,\
-lib/ojdbc10.jar
+lib/ojdbc10.jar,\
+lib/HikariCP.jar
 source.. = src/
@@ -24,19 +24,9 @@
 <configuration>
 <artifactItems>
 <artifactItem>
-<groupId>com.mchange</groupId>
-<artifactId>c3p0</artifactId>
-<version>0.9.5.5</version>
-</artifactItem>
-<artifactItem>
-<groupId>com.mchange</groupId>
-<artifactId>mchange-commons-java</artifactId>
-<version>0.2.20</version>
-</artifactItem>
-<artifactItem>
-<groupId>com.google.code.maven-play-plugin.com.mchange</groupId>
-<artifactId>c3p0-oracle-thin-extras</artifactId>
-<version>0.9.5</version>
+<groupId>com.zaxxer</groupId>
+<artifactId>HikariCP</artifactId>
+<version>5.0.1</version>
 </artifactItem>
 <artifactItem>
 <groupId>com.oracle.database.jdbc</groupId>
@@ -17,14 +17,12 @@
 package org.compiere.db;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
 import java.io.InputStream;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
 import java.net.URL;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.sql.Connection;
 import java.sql.Driver;
@@ -34,7 +32,8 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Timestamp;
 import java.util.Properties;
-import java.util.Random;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.logging.Level;
 
 import javax.sql.DataSource;
@@ -52,9 +51,10 @@ import org.compiere.util.DisplayType;
 import org.compiere.util.Ini;
 import org.compiere.util.Language;
 import org.compiere.util.Trx;
-import org.compiere.util.Util;
 
-import com.mchange.v2.c3p0.ComboPooledDataSource;
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import com.zaxxer.hikari.HikariPoolMXBean;
 
 import oracle.jdbc.OracleDriver;
 
@@ -72,7 +72,7 @@ import oracle.jdbc.OracleDriver;
 public class DB_Oracle implements AdempiereDatabase
 {
 
-private static final String POOL_PROPERTIES = "pool.properties";
+private static final String POOL_PROPERTIES = "hikaricp.properties";
 
 /**
  * Oracle Database
@@ -112,10 +112,10 @@ public class DB_Oracle implements AdempiereDatabase
 public static final int DEFAULT_CM_PORT = 1630;
 
 /** Connection String */
-private String m_connectionURL;
+private volatile String m_connectionURL;
 
 /** Data Source */
-private ComboPooledDataSource m_ds = null;
+private volatile HikariDataSource m_ds;
 
 /** Cached User Name */
 private String m_userName = null;
@@ -125,11 +125,6 @@ public class DB_Oracle implements AdempiereDatabase
 /** Logger */
 private static final CLogger log = CLogger.getCLogger (DB_Oracle.class);
 
-private static int m_maxbusyconnections = 0;
-
-private Random rand = new Random();
-
 /**
  * Get Database Name
  * @return database short name
@@ -318,11 +313,13 @@ public class DB_Oracle implements AdempiereDatabase
 sb.append(m_connectionURL);
 try
 {
-StringBuilder logBuffer = new StringBuilder(50);
-logBuffer.append("# Connections: ").append(m_ds.getNumConnections());
-logBuffer.append(" , # Busy Connections: ").append(m_ds.getNumBusyConnections());
-logBuffer.append(" , # Idle Connections: ").append(m_ds.getNumIdleConnections());
-logBuffer.append(" , # Orphaned Connections: ").append(m_ds.getNumUnclosedOrphanedConnections());
+StringBuilder logBuffer = new StringBuilder();
+HikariPoolMXBean mxBean = m_ds.getHikariPoolMXBean();
+logBuffer.append("# Connections: ").append(mxBean.getTotalConnections());
+logBuffer.append(" , # Busy Connections: ").append(mxBean.getActiveConnections());
+logBuffer.append(" , # Idle Connections: ").append(mxBean.getIdleConnections());
+logBuffer.append(" , # Threads waiting on connection: ").append(mxBean.getThreadsAwaitingConnection());
 }
 catch (Exception e)
 {
@@ -346,13 +343,14 @@ public class DB_Oracle implements AdempiereDatabase
 StringBuilder sb = new StringBuilder();
 try
 {
-sb.append("# Connections: ").append(m_ds.getNumConnections());
-sb.append(" , # Busy Connections: ").append(m_ds.getNumBusyConnections());
-sb.append(" , # Idle Connections: ").append(m_ds.getNumIdleConnections());
-sb.append(" , # Orphaned Connections: ").append(m_ds.getNumUnclosedOrphanedConnections());
-sb.append(" , # Min Pool Size: ").append(m_ds.getMinPoolSize());
-sb.append(" , # Max Pool Size: ").append(m_ds.getMaxPoolSize());
-sb.append(" , # Max Statements Cache Per Session: ").append(m_ds.getMaxStatementsPerConnection());
+HikariPoolMXBean mxBean = m_ds.getHikariPoolMXBean();
+sb.append("# Connections: ").append(mxBean.getTotalConnections());
+sb.append(" , # Busy Connections: ").append(mxBean.getActiveConnections());
+sb.append(" , # Idle Connections: ").append(mxBean.getIdleConnections());
+sb.append(" , # Threads waiting on connection: ").append(mxBean.getThreadsAwaitingConnection());
+sb.append(" , # Min Pool Size: ").append(m_ds.getMinimumIdle());
+sb.append(" , # Max Pool Size: ").append(m_ds.getMaximumPoolSize());
 sb.append(" , # Open Transactions: ").append(Trx.getOpenTransactions().length);
 }
 catch (Exception e)
@@ -568,185 +566,31 @@ public class DB_Oracle implements AdempiereDatabase
 return null;
 } // getCommands
 
-private String getFileName ()
+private String getPoolPropertiesFile ()
 {
-//
-String base = null;
-if (Ini.isClient())
-base = System.getProperty("user.home");
-else
-base = Ini.getAdempiereHome();
+String base = Ini.getAdempiereHome();
 
-if (base != null && !base.endsWith(File.separator))
+if (base != null && !base.endsWith(File.separator)) {
 base += File.separator;
+}
 
 //
 return base + getName() + File.separator + POOL_PROPERTIES;
 } // getFileName
 
-/**
- * Create DataSource
- * @param connection connection
- * @return data dource
- */
 public DataSource getDataSource(CConnection connection)
 {
-if (m_ds != null)
-return m_ds;
-
-InputStream inputStream = null;
-
-//check property file from home
-String propertyFilename = getFileName();
-File propertyFile = null;
-if (!Util.isEmpty(propertyFilename))
-{
-propertyFile = new File(propertyFilename);
-if (propertyFile.exists() && propertyFile.canRead())
-{
-try {
-inputStream = new FileInputStream(propertyFile);
-} catch (FileNotFoundException e) {
-e.printStackTrace();
-}
-}
-}
-
-URL url = null;
-if (inputStream == null)
-{
-propertyFile = null;
-url = Ini.isClient()
-? OracleBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/client.default.properties")
-: OracleBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/server.default.properties");
-
-try {
-inputStream = url.openStream();
-} catch (IOException e) {
-throw new DBException(e);
-}
-}
-
-Properties poolProperties = new Properties();
-try {
-poolProperties.load(inputStream);
-inputStream.close();
-inputStream = null;
-} catch (IOException e) {
-throw new DBException(e);
-}
-
-//auto create property file at home folder from default config
-if (propertyFile == null)
-{
-String directoryName = propertyFilename.substring(0, propertyFilename.length() - (POOL_PROPERTIES.length()+1));
-File dir = new File(directoryName);
-if (!dir.exists())
-dir.mkdir();
-propertyFile = new File(propertyFilename);
-try {
-inputStream = url.openStream();
-Files.copy(inputStream, propertyFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
-inputStream.close();
-inputStream = null;
-} catch (FileNotFoundException e) {
-e.printStackTrace();
-} catch (IOException e) {
-e.printStackTrace();
-}
-}
-
-if (inputStream != null)
-{
-try {
-inputStream.close();
-} catch (IOException e) {}
-}
-
-int idleConnectionTestPeriod = getIntProperty(poolProperties, "IdleConnectionTestPeriod", 1200);
-int acquireRetryAttempts = getIntProperty(poolProperties, "AcquireRetryAttempts", 2);
-int maxIdleTimeExcessConnections = getIntProperty(poolProperties, "MaxIdleTimeExcessConnections", 1200);
-int maxIdleTime = getIntProperty(poolProperties, "MaxIdleTime", 1200);
-int unreturnedConnectionTimeout = getIntProperty(poolProperties, "UnreturnedConnectionTimeout", 0);
-boolean testConnectionOnCheckin = getBooleanProperty(poolProperties, "TestConnectionOnCheckin", false);
-boolean testConnectionOnCheckout = getBooleanProperty(poolProperties, "TestConnectionOnCheckout", true);
-String mlogClass = getStringProperty(poolProperties, "com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
-int checkoutTimeout = getIntProperty(poolProperties, "CheckoutTimeout", 0);
-int statementCacheNumDeferredCloseThreads = getIntProperty(poolProperties, "StatementCacheNumDeferredCloseThreads", 0);
-try
-{
-System.setProperty("com.mchange.v2.log.MLog", mlogClass);
-//System.setProperty("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "ALL");
-ComboPooledDataSource cpds = new ComboPooledDataSource();
-cpds.setDataSourceName("iDempiereDS");
-cpds.setDriverClass(DRIVER);
-//loads the jdbc driver
-cpds.setJdbcUrl(getConnectionURL(connection));
-cpds.setUser(connection.getDbUid());
-cpds.setPassword(connection.getDbPwd());
-//cpds.setPreferredTestQuery(DEFAULT_CONN_TEST_SQL);
-cpds.setIdleConnectionTestPeriod(idleConnectionTestPeriod);
-cpds.setAcquireRetryAttempts(acquireRetryAttempts);
-cpds.setTestConnectionOnCheckin(testConnectionOnCheckin);
-cpds.setTestConnectionOnCheckout(testConnectionOnCheckout);
-if (checkoutTimeout > 0)
-cpds.setCheckoutTimeout(checkoutTimeout);
-cpds.setStatementCacheNumDeferredCloseThreads(statementCacheNumDeferredCloseThreads);
-cpds.setMaxIdleTimeExcessConnections(maxIdleTimeExcessConnections);
-cpds.setMaxIdleTime(maxIdleTime);
-if (Ini.isClient())
-{
-int maxPoolSize = getIntProperty(poolProperties, "MaxPoolSize", 15);
-int initialPoolSize = getIntProperty(poolProperties, "InitialPoolSize", 1);
-int minPoolSize = getIntProperty(poolProperties, "MinPoolSize", 1);
-cpds.setInitialPoolSize(initialPoolSize);
-cpds.setMinPoolSize(minPoolSize);
-cpds.setMaxPoolSize(maxPoolSize);
-m_maxbusyconnections = (int) (maxPoolSize * 0.9);
-}
-else
-{
-int maxPoolSize = getIntProperty(poolProperties, "MaxPoolSize", 400);
-int initialPoolSize = getIntProperty(poolProperties, "InitialPoolSize", 10);
-int minPoolSize = getIntProperty(poolProperties, "MinPoolSize", 5);
-cpds.setInitialPoolSize(initialPoolSize);
-cpds.setMinPoolSize(minPoolSize);
-cpds.setMaxPoolSize(maxPoolSize);
-m_maxbusyconnections = (int) (maxPoolSize * 0.9);
-
-//statement pooling
-int maxStatementsPerConnection = getIntProperty(poolProperties, "MaxStatementsPerConnection", 0);
-if (maxStatementsPerConnection > 0)
-cpds.setMaxStatementsPerConnection(maxStatementsPerConnection);
-}
-
-if (unreturnedConnectionTimeout > 0)
-{
-//the following sometimes kill active connection!
-cpds.setUnreturnedConnectionTimeout(1200);
-cpds.setDebugUnreturnedConnectionStackTraces(true);
-}
-
-m_ds = cpds;
-}
-catch (Exception ex)
-{
-m_ds = null;
-//log might cause infinite loop since it will try to acquire database connection again
-//log.log(Level.SEVERE, "Could not initialise C3P0 Datasource", ex);
-System.err.println("Could not initialise C3P0 Datasource: " + ex.getLocalizedMessage());
-}
-
+ensureInitialized(connection);
 return m_ds;
-} // getDataSource
+}
 
 /**
  * Get Cached Connection
- * @param connection info
- * @param autoCommit true if autocommit connection
- * @param transactionIsolation Connection transaction level
- * @return connection or null
+ * @param connection connection
+ * @param autoCommit auto commit
+ * @param transactionIsolation trx isolation
+ * @return Connection
  * @throws Exception
  */
 public Connection getCachedConnection (CConnection connection,
@@ -754,100 +598,25 @@ public class DB_Oracle implements AdempiereDatabase
 throws Exception
 {
 Connection conn = null;
-Exception exception = null;
-try
-{
 if (m_ds == null)
 getDataSource(connection);
 
-//
-try
-{
-int numConnections = m_ds.getNumBusyConnections();
-if(numConnections >= m_maxbusyconnections && m_maxbusyconnections > 0)
-{
-//system is under heavy load, wait between 20 to 40 seconds
-int randomNum = rand.nextInt(40 - 20 + 1) + 20;
-Thread.sleep(randomNum * 1000);
-}
-conn = m_ds.getConnection();
-if (conn == null) {
-//try again after 10 to 30 seconds
-int randomNum = rand.nextInt(30 - 10 + 1) + 10;
-Thread.sleep(randomNum * 1000);
-conn = m_ds.getConnection();
-}
-
-if (conn != null)
-{
+// If HikariCP has no available free connection this call will block until either
+// a connection becomes available or the configured 'connectionTimeout' value is
+// reached (after which a SQLException is thrown).
+conn = m_ds.getConnection();
+
 if (conn.getTransactionIsolation() != transactionIsolation)
+{
 conn.setTransactionIsolation(transactionIsolation);
+}
 if (conn.getAutoCommit() != autoCommit)
+{
 conn.setAutoCommit(autoCommit);
 }
-}
-catch (Exception e)
-{
-exception = e;
-conn = null;
-if (DBException.isInvalidUserPassError(e))
-{
-//log might cause infinite loop since it will try to acquire database connection again
-/*
-log.severe("Cannot connect to database: "
-+ getConnectionURL(connection)
-+ " - UserID=" + connection.getDbUid());
-*/
-StringBuilder msgerr = new StringBuilder("Cannot connect to database: ")
-.append(getConnectionURL(connection))
-.append(" - UserID=").append(connection.getDbUid());
-System.err.println(msgerr.toString());
-}
-}
-
-if (conn == null && exception != null)
-{
-//log might cause infinite loop since it will try to acquire database connection again
-/*
-log.log(Level.SEVERE, exception.toString());
-log.fine(toString()); */
-System.err.println(exception.toString());
-}
-}
-catch (Exception e)
-{
-exception = e;
-}
-
-try
-{
-if (conn != null) {
-boolean trace = "true".equalsIgnoreCase(System.getProperty("org.adempiere.db.traceStatus"));
-int numConnections = m_ds.getNumBusyConnections();
-if (numConnections > 1)
-{
-if (trace)
-{
-log.warning(getStatus());
-}
-if(numConnections >= m_maxbusyconnections && m_maxbusyconnections > 0)
-{
-if (!trace)
-log.warning(getStatus());
-//hengsin: make a best effort to reclaim leak connection
-Runtime.getRuntime().runFinalization();
-}
-}
-} else {
-//don't use log.severe here as it will try to access db again
-System.err.println("Failed to acquire new connection. Status=" + getStatus());
-}
-}
-catch (Exception ex)
-{
-}
-if (exception != null)
-throw exception;
 return conn;
 } // getCachedConnection
 
@@ -879,35 +648,119 @@ public class DB_Oracle implements AdempiereDatabase
 return DriverManager.getConnection (dbUrl, dbUid, dbPwd);
 } // getDriverConnection
 
+private Properties getPoolProperties() {
+//check property file from home
+File userPropertyFile = new File(getPoolPropertiesFile());
+URL propertyFileURL = null;
+
+if (userPropertyFile.exists() && userPropertyFile.canRead())
+{
+try {
+propertyFileURL = userPropertyFile.toURI().toURL();
+} catch (Exception e) {
+e.printStackTrace();
+}
+}
+
+if (propertyFileURL == null)
+{
+propertyFileURL = OracleBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/server.default.properties");
+}
+
+Properties poolProperties = new Properties();
+try (InputStream propertyFileInputStream = propertyFileURL.openStream()) {
+poolProperties.load(propertyFileInputStream);
+} catch (Exception e) {
+throw new DBException(e);
+}
+
+//auto create property file at home folder from default config
+if (!userPropertyFile.exists())
+{
+try {
+Path directory = userPropertyFile.toPath().getParent();
+Files.createDirectories(directory);
+
+try (InputStream propertyFileInputStream = propertyFileURL.openStream()) {
+Files.copy(propertyFileInputStream, userPropertyFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
+}
+
+} catch (Exception e) {
+e.printStackTrace();
+}
+}
+
+return poolProperties;
+}
+
+/** Boolean to indicate the PostgreSQL connection pool is either initializing or initialized.*/
+private final AtomicBoolean initialized = new AtomicBoolean(false);
+/** Latch which can be used to wait for initialization completion. */
+private final CountDownLatch initializedLatch = new CountDownLatch(1);
+
+/**
+ * Allows the connection pool to be lazily initialized. While it might be preferable to do
+ * this once upon initialization of this class the current design of iDempiere makes this
+ * hard.
+ *
+ * Calling this method will block until the pool is configured. This does NOT mean it will
+ * block until a database connection has been setup.
+ *
+ * @param connection
+ */
+private void ensureInitialized(CConnection connection) {
+if (!initialized.compareAndSet(false, true)) {
+try {
+initializedLatch.await();
+} catch (InterruptedException e) {
+return;
+}
+}
+
+try {
+Properties poolProperties = getPoolProperties();
+// Do not override values which might have been read from the users
+// hikaricp.properties file.
+if(!poolProperties.contains("jdbcUrl")) {
+poolProperties.put("jdbcUrl", getConnectionURL(connection));
+}
+if (!poolProperties.contains("username")) {
+poolProperties.put("username", connection.getDbUid());
+}
+if (!poolProperties.contains("password")) {
+poolProperties.put("password", connection.getDbPwd());
+}
+
+HikariConfig hikariConfig = new HikariConfig(poolProperties);
+m_ds = new HikariDataSource(hikariConfig);
+
+m_connectionURL = m_ds.getJdbcUrl();
+
+initializedLatch.countDown();
+}
+catch (Exception ex) {
+throw new IllegalStateException("Could not initialise Hikari Datasource", ex);
+}
+}
+
 /**
  * Close
  */
 public void close()
 {
-if (log.isLoggable(Level.CONFIG)) log.config(toString());
-if (m_ds != null)
+if (log.isLoggable(Level.CONFIG))
 {
-try
-{
-//wait 5 seconds if pool is still busy
-if (m_ds.getNumBusyConnections() > 0)
-{
-Thread.sleep(5 * 1000);
-}
-} catch (Exception e)
-{
-e.printStackTrace();
+log.config(toString());
 }
 
 try
 {
 m_ds.close();
 }
 catch (Exception e)
 {
-log.log(Level.SEVERE, "Could not close Data Source");
+e.printStackTrace();
 }
-}
-m_ds = null;
 } // close
 
 /**
@@ -1027,45 +880,6 @@ public class DB_Oracle implements AdempiereDatabase
 return true;
 }
 
-private int getIntProperty(Properties properties, String key, int defaultValue)
-{
-int i = defaultValue;
-try
-{
-String s = properties.getProperty(key);
-if (s != null && s.trim().length() > 0)
-i = Integer.parseInt(s);
-}
-catch (Exception e) {}
-return i;
-}
-
-private boolean getBooleanProperty(Properties properties, String key, boolean defaultValue)
-{
-boolean b = defaultValue;
-try
-{
-String s = properties.getProperty(key);
-if (s != null && s.trim().length() > 0)
-b = Boolean.valueOf(s);
-}
-catch (Exception e) {}
-return b;
-}
-
-private String getStringProperty(Properties properties, String key, String defaultValue)
-{
-String b = defaultValue;
-try
-{
-String s = properties.getProperty(key);
-if (s != null && s.trim().length() > 0)
-b = s.trim();
-}
-catch(Exception e){}
-return b;
-}
-
 @Override
 public boolean forUpdate(PO po, int timeout) {
 //only can lock for update if using trx
@@ -6,9 +6,8 @@ Bundle-Version: 10.0.0.qualifier
 Bundle-RequiredExecutionEnvironment: JavaSE-11
 Require-Capability: osgi.ee;filter:="(&(osgi.ee=JavaSE)(version>=11))"
 Bundle-ClassPath: .,
-lib/c3p0.jar,
-lib/mchange-commons-java.jar,
-lib/postgresql.jar
+lib/postgresql.jar,
+lib/HikariCP.jar
 Require-Bundle: org.adempiere.base;bundle-version="0.0.0",
 org.adempiere.install;bundle-version="0.0.0";resolution:=optional
 Import-Package: org.osgi.framework,
@@ -1,16 +0,0 @@
-#timeout
-IdleConnectionTestPeriod=1200
-AcquireRetryAttempts=2
-MaxIdleTimeExcessConnections=1200
-MaxIdleTime=1200
-#UnreturnedConnectionTimeout=1800
-
-#size
-MaxPoolSize=15
-InitialPoolSize=1
-MinPoolSize=1
-
-#flag
-TestConnectionOnCheckin=false
-TestConnectionOnCheckout=true
-#CheckoutTimeout=60;
@@ -1,19 +1,72 @@
-#timeout
-IdleConnectionTestPeriod=1200
-AcquireRetryAttempts=2
-MaxIdleTimeExcessConnections=1200
-MaxIdleTime=1200
-#UnreturnedConnectionTimeout=1800
-
-#size
-# Verify that MaxPoolSize is lesser than max_connections defined on postgresql.conf
-MaxPoolSize=90
-InitialPoolSize=10
-MinPoolSize=5
-MaxStatementsPerConnection=30
-
-#flag
-TestConnectionOnCheckin=false
-TestConnectionOnCheckout=true
-#CheckoutTimeout=60;
-com.mchange.v2.log.MLog=com.mchange.v2.log.slf4j.Slf4jMLog
+# !! ALL SETTINGS PRESENT IN THIS FILE WILL BE FED IN TO HIKARICP !!
+# !! DO NOT SET EMPTY VALUES !!
+#
+# You can add HikariCP settings that are not present in this file. In order to
+# use the default just remove or comment out the key all together.
+
+# This property controls the maximum number of milliseconds that a client (that's you)
+# will wait for a connection from the pool. If this time is exceeded without a
+# connection becoming available, a SQLException will be thrown. Lowest acceptable
+# connection timeout is 250 ms.
+# Default: 30000 (30 seconds)
+connectionTimeout=60000
+
+# This property controls the maximum amount of time that a connection is allowed
+# to sit idle in the pool. This setting only applies when minimumIdle is defined
+# to be less than maximumPoolSize. Idle connections will not be retired once the
+# pool reaches minimumIdle connections. Whether a connection is retired as idle
+# or not is subject to a maximum variation of +30 seconds, and average variation
+# of +15 seconds. A connection will never be retired as idle before this timeout.
+# A value of 0 means that idle connections are never removed from the pool.
+# The minimum allowed value is 10000ms (10 seconds).
+# Default: 600000 (10 minutes)
+#idleTimeout=
+
+# This property controls how frequently HikariCP will attempt to keep a connection
+# alive, in order to prevent it from being timed out by the database or network infrastructure.
+# This value must be less than the maxLifetime value. A "keepalive" will only occur on an idle
+# connection. When the time arrives for a "keepalive" against a given connection, that
+# connection will be removed from the pool, "pinged", and then returned to the pool. The
+# 'ping' is one of either: invocation of the JDBC4 isValid() method, or execution of the
+# connectionTestQuery. Typically, the duration out-of-the-pool should be measured in single
+# digit milliseconds or even sub-millisecond, and therefore should have little or no noticible
+# performance impact. The minimum allowed value is 30000ms (30 seconds), but a value in the
+# range of minutes is most desirable. Default: 0 (disabled)
+#keepaliveTime=
+
+# This property controls the minimum number of idle connections that HikariCP
+# tries to maintain in the pool. If the idle connections dip below this value
+# and total connections in the pool are less than maximumPoolSize, HikariCP
+# will make a best effort to add additional connections quickly and efficiently.
+# However, for maximum performance and responsiveness to spike demands, we
+# recommend not setting this value and instead allowing HikariCP to act as a
+# fixed size connection pool.
+# Default: same as maximumPoolSize
+#minimumIdle=
+
+# This property controls the maximum size that the pool is allowed to reach,
+# including both idle and in-use connections. Basically this value will determine
+# the maximum number of actual connections to the database backend. A reasonable
+# value for this is best determined by your execution environment. When the pool
+# reaches this size, and no idle connections are available, calls to getConnection()
+# will block for up to connectionTimeout milliseconds before timing out. Please
+# read about pool sizing: https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing
+# Default: 10
+maximumPoolSize=30
+
+# This property controls the maximum lifetime of a connection in the pool. An
+# in-use connection will never be retired, only when it is closed will it then be
+# removed. On a connection-by-connection basis, minor negative attenuation is applied
+# to avoid mass-extinction in the pool. We strongly recommend setting this value, and
+# it should be several seconds shorter than any database or infrastructure imposed
+# connection time limit. A value of 0 indicates no maximum lifetime (infinite lifetime),
+# subject of course to the idleTimeout setting. The minimum allowed value is 30000ms
+# (30 seconds).
+# Default: 1800000 (30 minutes)
+#maxLifetime=
+
+# This property controls the amount of time that a connection can be out of the
+# pool before a message is logged indicating a possible connection leak. A value of 0
+# means leak detection is disabled. Lowest acceptable value for enabling leak detection
+# is 2000 (2 seconds). Default: 0
+leakDetectionThreshold=300000
@@ -4,7 +4,6 @@ bin.includes = META-INF/,\
 plugin.xml,\
 OSGI-INF/pgprovider.xml,\
 OSGI-INF/,\
-lib/c3p0.jar,\
-lib/mchange-commons-java.jar,\
-lib/postgresql.jar
+lib/postgresql.jar,\
+lib/HikariCP.jar
 source.. = src/
@@ -24,14 +24,9 @@
 <configuration>
 <artifactItems>
 <artifactItem>
-<groupId>com.mchange</groupId>
-<artifactId>c3p0</artifactId>
-<version>0.9.5.5</version>
-</artifactItem>
-<artifactItem>
-<groupId>com.mchange</groupId>
-<artifactId>mchange-commons-java</artifactId>
-<version>0.2.20</version>
+<groupId>com.zaxxer</groupId>
+<artifactId>HikariCP</artifactId>
+<version>5.0.1</version>
 </artifactItem>
 <artifactItem>
 <groupId>org.postgresql</groupId>
@@ -19,14 +19,12 @@
 package org.compiere.db;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
 import java.io.InputStream;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
 import java.net.URL;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -38,7 +36,8 @@ import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
-import java.util.Random;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.logging.Level;
 
 import javax.sql.ConnectionPoolDataSource;
@@ -61,7 +60,9 @@ import org.compiere.util.Language;
 import org.compiere.util.Trx;
 import org.compiere.util.Util;
 
-import com.mchange.v2.c3p0.ComboPooledDataSource;
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import com.zaxxer.hikari.HikariPoolMXBean;
 
 /**
  * PostgreSQL Database Port
@@ -79,7 +80,7 @@ public class DB_PostgreSQL implements AdempiereDatabase
 
 private static final String P_POSTGRE_SQL_NATIVE = "PostgreSQLNative";
 
-private static final String POOL_PROPERTIES = "pool.properties";
+private static final String POOL_PROPERTIES = "hikaricp.properties";
 
 private static Boolean sysNative = null;
 
@@ -99,9 +100,8 @@ public class DB_PostgreSQL implements AdempiereDatabase
 /**
  * PostgreSQL Database
  */
-public DB_PostgreSQL()
-{
-} // DB_PostgreSQL
+public DB_PostgreSQL() {
+}
 
 /** Driver */
 private org.postgresql.Driver s_driver = null;
@@ -113,12 +113,10 @@ public class DB_PostgreSQL implements AdempiereDatabase
 public static final int DEFAULT_PORT = 5432;
 
 /** Data Source */
-private ComboPooledDataSource m_ds = null;
+private volatile HikariDataSource m_ds;
 
 /** Statement Converter */
 private Convert_PostgreSQL m_convert = new Convert_PostgreSQL();
-/** Connection String */
-private String m_connection;
 /** Cached Database Name */
 private String m_dbName = null;
 
@@ -131,14 +129,10 @@ public class DB_PostgreSQL implements AdempiereDatabase
 /** Logger */
 private static final CLogger log = CLogger.getCLogger (DB_PostgreSQL.class);
 
-private static int m_maxbusyconnections = 0;
-
 private static final String NATIVE_MARKER = "NATIVE_"+Database.DB_POSTGRESQL+"_KEYWORK";
 
 private CCache<String, String> convertCache = new CCache<String, String>(null, "DB_PostgreSQL_Convert_Cache", 1000, CCache.DEFAULT_EXPIRE_MINUTE, false);
 
-private Random rand = new Random();
-
 private static final List<String> reservedKeywords = Arrays.asList("limit","action","old","new");
 
 /**
@@ -216,8 +210,7 @@ public class DB_PostgreSQL implements AdempiereDatabase
 sb.append("&").append(urlParameters);
 }
 
-m_connection = sb.toString();
-return m_connection;
+return sb.toString();
 } // getConnectionString
 
 /**
@@ -299,11 +292,13 @@ public class DB_PostgreSQL implements AdempiereDatabase
 sb.append(m_connectionURL);
 try
 {
-StringBuilder logBuffer = new StringBuilder(50);
-logBuffer.append("# Connections: ").append(m_ds.getNumConnections());
-logBuffer.append(" , # Busy Connections: ").append(m_ds.getNumBusyConnections());
-logBuffer.append(" , # Idle Connections: ").append(m_ds.getNumIdleConnections());
-logBuffer.append(" , # Orphaned Connections: ").append(m_ds.getNumUnclosedOrphanedConnections());
+StringBuilder logBuffer = new StringBuilder();
+HikariPoolMXBean mxBean = m_ds.getHikariPoolMXBean();
+logBuffer.append("# Connections: ").append(mxBean.getTotalConnections());
+logBuffer.append(" , # Busy Connections: ").append(mxBean.getActiveConnections());
+logBuffer.append(" , # Idle Connections: ").append(mxBean.getIdleConnections());
+logBuffer.append(" , # Threads waiting on connection: ").append(mxBean.getThreadsAwaitingConnection());
 }
 catch (Exception e)
 {
@@ -327,13 +322,14 @@ public class DB_PostgreSQL implements AdempiereDatabase
 	StringBuilder sb = new StringBuilder();
 	try
 	{
-		sb.append("# Connections: ").append(m_ds.getNumConnections());
-		sb.append(" , # Busy Connections: ").append(m_ds.getNumBusyConnections());
-		sb.append(" , # Idle Connections: ").append(m_ds.getNumIdleConnections());
-		sb.append(" , # Orphaned Connections: ").append(m_ds.getNumUnclosedOrphanedConnections());
-		sb.append(" , # Min Pool Size: ").append(m_ds.getMinPoolSize());
-		sb.append(" , # Max Pool Size: ").append(m_ds.getMaxPoolSize());
-		sb.append(" , # Max Statements Cache Per Session: ").append(m_ds.getMaxStatementsPerConnection());
+		HikariPoolMXBean mxBean = m_ds.getHikariPoolMXBean();
+		sb.append("# Connections: ").append(mxBean.getTotalConnections());
+		sb.append(" , # Busy Connections: ").append(mxBean.getActiveConnections());
+		sb.append(" , # Idle Connections: ").append(mxBean.getIdleConnections());
+		sb.append(" , # Threads waiting on connection: ").append(mxBean.getThreadsAwaitingConnection());
+		sb.append(" , # Min Pool Size: ").append(m_ds.getMinimumIdle());
+		sb.append(" , # Max Pool Size: ").append(m_ds.getMaximumPoolSize());
 		sb.append(" , # Open Transactions: ").append(Trx.getOpenTransactions().length);
 	}
 	catch (Exception e)
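Illustrative aside (not part of the commit): the counters used above come from HikariCP's HikariPoolMXBean, which HikariDataSource exposes programmatically via getHikariPoolMXBean(), so no JMX registration is needed to read them. A minimal sketch, assuming an already started HikariDataSource:

import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.HikariPoolMXBean;

public final class PoolStatsSketch {
    /** Formats the same pool counters the patched status methods report. */
    public static String format(HikariDataSource ds) {
        HikariPoolMXBean pool = ds.getHikariPoolMXBean();
        return "# Connections: " + pool.getTotalConnections()
                + " , # Busy Connections: " + pool.getActiveConnections()
                + " , # Idle Connections: " + pool.getIdleConnections()
                + " , # Threads waiting on connection: " + pool.getThreadsAwaitingConnection();
    }
}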
@@ -595,101 +591,35 @@ public class DB_PostgreSQL implements AdempiereDatabase
 	throws Exception
 	{
 		Connection conn = null;
-		Exception exception = null;
-		try
-		{
 		if (m_ds == null)
 			getDataSource(connection);
 
-		//
-		try
-		{
-			int numConnections = m_ds.getNumBusyConnections();
-			if(numConnections >= m_maxbusyconnections && m_maxbusyconnections > 0)
-			{
-				//system is under heavy load, wait between 20 to 40 seconds
-				int randomNum = rand.nextInt(40 - 20 + 1) + 20;
-				Thread.sleep(randomNum * 1000);
-			}
-			conn = m_ds.getConnection();
-			if (conn == null) {
-				//try again after 10 to 30 seconds
-				int randomNum = rand.nextInt(30 - 10 + 1) + 10;
-				Thread.sleep(randomNum * 1000);
-				conn = m_ds.getConnection();
-			}
 
-			if (conn != null)
-			{
+		// If HikariCP has no available free connection this call will block until either
+		// a connection becomes available or the configured 'connectionTimeout' value is
+		// reached (after which a SQLException is thrown).
+		conn = m_ds.getConnection();
 
 		if (conn.getTransactionIsolation() != transactionIsolation)
+		{
 			conn.setTransactionIsolation(transactionIsolation);
+		}
 		if (conn.getAutoCommit() != autoCommit)
+		{
 			conn.setAutoCommit(autoCommit);
 		}
-			}
-		}
-		catch (Exception e)
-		{
-			exception = e;
-			conn = null;
-		}
 
-		if (conn == null && exception != null)
-		{
-			//log might cause infinite loop since it will try to acquire database connection again
-			/*
-			log.log(Level.SEVERE, exception.toString());
-			log.fine(toString()); */
-			System.err.println(exception.toString());
-		}
-		}
-		catch (Exception e)
-		{
-			exception = e;
-		}
 
-		try
-		{
-			if (conn != null) {
-				boolean trace = "true".equalsIgnoreCase(System.getProperty("org.adempiere.db.traceStatus"));
-				int numConnections = m_ds.getNumBusyConnections();
-				if (numConnections > 1)
-				{
-					if (trace)
-					{
-						log.warning(getStatus());
-					}
-					if(numConnections >= m_maxbusyconnections && m_maxbusyconnections > 0)
-					{
-						if (!trace)
-							log.warning(getStatus());
-						//hengsin: make a best effort to reclaim leak connection
-						Runtime.getRuntime().runFinalization();
-					}
-				}
-			} else {
-				//don't use log.severe here as it will try to access db again
-				System.err.println("Failed to acquire new connection. Status=" + getStatus());
-			}
-		}
-		catch (Exception ex)
-		{
-		}
-		if (exception != null)
-			throw exception;
 		return conn;
 	}	//	getCachedConnection
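Illustrative sketch (not part of the commit): the new comment above refers to HikariCP's connectionTimeout setting, which bounds how long getConnection() blocks before an SQLException is thrown. A minimal standalone example of configuring that bound, using hypothetical connection details rather than the values the pool properties file would normally supply:

import java.sql.Connection;
import java.sql.SQLException;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class ConnectionTimeoutSketch {
    public static void main(String[] args) {
        HikariConfig config = new HikariConfig();
        // Hypothetical values; the real pool reads its settings from the pool properties file.
        config.setJdbcUrl("jdbc:postgresql://localhost:5432/idempiere");
        config.setUsername("adempiere");
        config.setPassword("adempiere");
        config.setConnectionTimeout(30_000);        // getConnection() blocks at most 30 seconds
        config.setLeakDetectionThreshold(300_000);  // log connections not returned within 5 minutes

        try (HikariDataSource ds = new HikariDataSource(config);
             Connection conn = ds.getConnection()) {
            // use the connection
        } catch (SQLException e) {
            // thrown when no connection became available within connectionTimeout
            e.printStackTrace();
        }
    }
}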
 
-	private String getFileName ()
+	private String getPoolPropertiesFile ()
 	{
-		//
-		String base = null;
-		if (Ini.isClient())
-			base = System.getProperty("user.home");
-		else
-			base = Ini.getAdempiereHome();
+		String base = Ini.getAdempiereHome();
 
-		if (base != null && !base.endsWith(File.separator))
+		if (base != null && !base.endsWith(File.separator)) {
 			base += File.separator;
+		}
 
 		//
 		return base + getName() + File.separator + POOL_PROPERTIES;
@@ -702,155 +632,7 @@ public class DB_PostgreSQL implements AdempiereDatabase
 	 */
 	public DataSource getDataSource(CConnection connection)
 	{
-		if (m_ds != null)
-			return m_ds;
-
-		InputStream inputStream = null;
-
-		//check property file from home
-		String propertyFilename = getFileName();
-		File propertyFile = null;
-		if (!Util.isEmpty(propertyFilename))
-		{
-			propertyFile = new File(propertyFilename);
-			if (propertyFile.exists() && propertyFile.canRead())
-			{
-				try {
-					inputStream = new FileInputStream(propertyFile);
-				} catch (FileNotFoundException e) {
-					e.printStackTrace();
-				}
-			}
-		}
-
-		//fall back to default config
-		URL url = null;
-		if (inputStream == null)
-		{
-			propertyFile = null;
-			url = Ini.isClient()
-				? PostgreSQLBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/client.default.properties")
-				: PostgreSQLBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/server.default.properties");
-
-			try {
-				inputStream = url.openStream();
-			} catch (IOException e) {
-				e.printStackTrace();
-			}
-		}
-
-		Properties poolProperties = new Properties();
-		try {
-			poolProperties.load(inputStream);
-			inputStream.close();
-			inputStream = null;
-		} catch (IOException e) {
-			throw new DBException(e);
-		}
-
-		//auto create property file at home folder from default config
-		if (propertyFile == null)
-		{
-			String directoryName = propertyFilename.substring(0, propertyFilename.length() - (POOL_PROPERTIES.length()+1));
-			File dir = new File(directoryName);
-			if (!dir.exists())
-				dir.mkdir();
-			propertyFile = new File(propertyFilename);
-			try {
-				inputStream = url.openStream();
-				Files.copy(inputStream, propertyFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
-				inputStream.close();
-				inputStream = null;
-			} catch (FileNotFoundException e) {
-				e.printStackTrace();
-			} catch (IOException e) {
-				e.printStackTrace();
-			}
-		}
-
-		if (inputStream != null)
-		{
-			try {
-				inputStream.close();
-			} catch (IOException e) {}
-		}
-
-		int idleConnectionTestPeriod = getIntProperty(poolProperties, "IdleConnectionTestPeriod", 1200);
-		int acquireRetryAttempts = getIntProperty(poolProperties, "AcquireRetryAttempts", 2);
-		int maxIdleTimeExcessConnections = getIntProperty(poolProperties, "MaxIdleTimeExcessConnections", 1200);
-		int maxIdleTime = getIntProperty(poolProperties, "MaxIdleTime", 1200);
-		int unreturnedConnectionTimeout = getIntProperty(poolProperties, "UnreturnedConnectionTimeout", 0);
-		boolean testConnectionOnCheckin = getBooleanProperty(poolProperties, "TestConnectionOnCheckin", false);
-		boolean testConnectionOnCheckout = getBooleanProperty(poolProperties, "TestConnectionOnCheckout", true);
-		String mlogClass = getStringProperty(poolProperties, "com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
-
-		int checkoutTimeout = getIntProperty(poolProperties, "CheckoutTimeout", 0);
-
-		try
-		{
-			System.setProperty("com.mchange.v2.log.MLog", mlogClass);
-			//System.setProperty("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "ALL");
-			ComboPooledDataSource cpds = new ComboPooledDataSource();
-			cpds.setDataSourceName("iDempiereDS");
-			cpds.setDriverClass(DRIVER);
-			//loads the jdbc driver
-			cpds.setJdbcUrl(getConnectionURL(connection));
-			cpds.setUser(connection.getDbUid());
-			cpds.setPassword(connection.getDbPwd());
-			//cpds.setPreferredTestQuery(DEFAULT_CONN_TEST_SQL);
-			cpds.setIdleConnectionTestPeriod(idleConnectionTestPeriod);
-			cpds.setMaxIdleTimeExcessConnections(maxIdleTimeExcessConnections);
-			cpds.setMaxIdleTime(maxIdleTime);
-			cpds.setTestConnectionOnCheckin(testConnectionOnCheckin);
-			cpds.setTestConnectionOnCheckout(testConnectionOnCheckout);
-			cpds.setAcquireRetryAttempts(acquireRetryAttempts);
-			if (checkoutTimeout > 0)
-				cpds.setCheckoutTimeout(checkoutTimeout);
-
-			if (Ini.isClient())
-			{
-				int maxPoolSize = getIntProperty(poolProperties, "MaxPoolSize", 15);
-				int initialPoolSize = getIntProperty(poolProperties, "InitialPoolSize", 1);
-				int minPoolSize = getIntProperty(poolProperties, "MinPoolSize", 1);
-				cpds.setInitialPoolSize(initialPoolSize);
-				cpds.setMinPoolSize(minPoolSize);
-				cpds.setMaxPoolSize(maxPoolSize);
-
-				m_maxbusyconnections = (int) (maxPoolSize * 0.9);
-			}
-			else
-			{
-				int maxPoolSize = getIntProperty(poolProperties, "MaxPoolSize", 400);
-				int initialPoolSize = getIntProperty(poolProperties, "InitialPoolSize", 10);
-				int minPoolSize = getIntProperty(poolProperties, "MinPoolSize", 5);
-				cpds.setInitialPoolSize(initialPoolSize);
-				cpds.setInitialPoolSize(initialPoolSize);
-				cpds.setMinPoolSize(minPoolSize);
-				cpds.setMaxPoolSize(maxPoolSize);
-				m_maxbusyconnections = (int) (maxPoolSize * 0.9);
-
-				//statement pooling
-				int maxStatementsPerConnection = getIntProperty(poolProperties, "MaxStatementsPerConnection", 0);
-				if (maxStatementsPerConnection > 0)
-					cpds.setMaxStatementsPerConnection(maxStatementsPerConnection);
-			}
-
-			if (unreturnedConnectionTimeout > 0)
-			{
-				//the following sometimes kill active connection!
-				cpds.setUnreturnedConnectionTimeout(1200);
-				cpds.setDebugUnreturnedConnectionStackTraces(true);
-			}
-
-			m_ds = cpds;
-			m_connectionURL = m_ds.getJdbcUrl();
-		}
-		catch (Exception ex)
-		{
-			m_ds = null;
-			log.log(Level.SEVERE, "Could not initialise C3P0 Datasource", ex);
-		}
-
+		ensureInitialized(connection);
 		return m_ds;
 	}
@@ -892,27 +674,109 @@ public class DB_PostgreSQL implements AdempiereDatabase
 		return DriverManager.getConnection (dbUrl, dbUid, dbPwd);
 	}	//	getDriverConnection
 
+	private Properties getPoolProperties() {
+		//check property file from home
+		File userPropertyFile = new File(getPoolPropertiesFile());
+		URL propertyFileURL = null;
+
+		if (userPropertyFile.exists() && userPropertyFile.canRead())
+		{
+			try {
+				propertyFileURL = userPropertyFile.toURI().toURL();
+			} catch (Exception e) {
+				e.printStackTrace();
+			}
+		}
+
+		if (propertyFileURL == null)
+		{
+			propertyFileURL = PostgreSQLBundleActivator.bundleContext.getBundle().getEntry("META-INF/pool/server.default.properties");
+		}
+
+		Properties poolProperties = new Properties();
+		try (InputStream propertyFileInputStream = propertyFileURL.openStream()) {
+			poolProperties.load(propertyFileInputStream);
+		} catch (Exception e) {
+			throw new DBException(e);
+		}
+
+		//auto create property file at home folder from default config
+		if (!userPropertyFile.exists())
+		{
+			try {
+				Path directory = userPropertyFile.toPath().getParent();
+				Files.createDirectories(directory);
+
+				try (InputStream propertyFileInputStream = propertyFileURL.openStream()) {
+					Files.copy(propertyFileInputStream, userPropertyFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
+				}
+
+			} catch (Exception e) {
+				e.printStackTrace();
+			}
+		}
+
+		return poolProperties;
+	}
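Illustrative sketch (not part of the commit): getPoolProperties() simply loads a java.util.Properties file, so any HikariCP property name placed in that file reaches the pool unchanged. A hypothetical example of loading such a file and turning it into a pool configuration; the file path and property keys here are examples only:

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Properties;

import com.zaxxer.hikari.HikariConfig;

public class PoolPropertiesSketch {
    public static void main(String[] args) throws Exception {
        Properties poolProperties = new Properties();
        // Hypothetical path; the real file lives under the iDempiere home directory.
        try (InputStream in = new FileInputStream("/tmp/pool.properties")) {
            poolProperties.load(in);
        }
        // Keys such as maximumPoolSize, minimumIdle, connectionTimeout or
        // leakDetectionThreshold are passed straight through to HikariCP.
        HikariConfig config = new HikariConfig(poolProperties);
        // ensureInitialized() then fills in jdbcUrl/username/password when the file
        // omits them and builds the HikariDataSource from this configuration.
        System.out.println("Configured max pool size: " + config.getMaximumPoolSize());
    }
}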
+
+	/** Boolean to indicate the PostgreSQL connection pool is either initializing or initialized.*/
+	private final AtomicBoolean initialized = new AtomicBoolean(false);
+	/** Latch which can be used to wait for initialization completion. */
+	private final CountDownLatch initializedLatch = new CountDownLatch(1);
+
+	/**
+	 * Allows the connection pool to be lazily initialized. While it might be preferable to do
+	 * this once upon initialization of this class the current design of iDempiere makes this
+	 * hard.
+	 *
+	 * Calling this method will block until the pool is configured. This does NOT mean it will
+	 * block until a database connection has been setup.
+	 *
+	 * @param connection
+	 */
+	private void ensureInitialized(CConnection connection) {
+		if (!initialized.compareAndSet(false, true)) {
+			try {
+				initializedLatch.await();
+			} catch (InterruptedException e) {
+				return;
+			}
+		}
+
+		try {
+			Properties poolProperties = getPoolProperties();
+			// Do not override values which might have been read from the users
+			// hikaricp.properties file.
+			if(!poolProperties.contains("jdbcUrl")) {
+				poolProperties.put("jdbcUrl", getConnectionURL(connection));
+			}
+			if (!poolProperties.contains("username")) {
+				poolProperties.put("username", connection.getDbUid());
+			}
+			if (!poolProperties.contains("password")) {
+				poolProperties.put("password", connection.getDbPwd());
+			}
+
+			HikariConfig hikariConfig = new HikariConfig(poolProperties);
+			m_ds = new HikariDataSource(hikariConfig);
+
+			m_connectionURL = m_ds.getJdbcUrl();
+
+			initializedLatch.countDown();
+		}
+		catch (Exception ex) {
+			throw new IllegalStateException("Could not initialise Hikari Datasource", ex);
+		}
+	}
+
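Illustrative sketch (not part of the commit): ensureInitialized() relies on an AtomicBoolean plus a CountDownLatch so that exactly one caller builds the pool while concurrent callers wait for that work to finish. The same idiom in isolation, with a plain String standing in for the data source field:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

public class InitializeOnceSketch {
    private final AtomicBoolean initialized = new AtomicBoolean(false);
    private final CountDownLatch initializedLatch = new CountDownLatch(1);
    private volatile String resource;  // stands in for the pooled data source

    public String get() throws InterruptedException {
        if (initialized.compareAndSet(false, true)) {
            // Only the first caller performs the (potentially slow) initialization.
            resource = "initialized";
            initializedLatch.countDown();
        } else {
            // All other callers block until the first caller has finished.
            initializedLatch.await();
        }
        return resource;
    }
}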
 	/**
 	 * 	Close
 	 */
 	public void close()
 	{
-		if (log.isLoggable(Level.CONFIG)) log.config(toString());
 
-		if (m_ds != null)
+		if (log.isLoggable(Level.CONFIG))
 		{
-			try
-			{
-				//wait 5 seconds if pool is still busy
-				if (m_ds.getNumBusyConnections() > 0)
-				{
-					Thread.sleep(5 * 1000);
-				}
-			} catch (Exception e)
-			{
-				e.printStackTrace();
-			}
+			log.config(toString());
 		}
 
 		try
@@ -923,8 +787,6 @@ public class DB_PostgreSQL implements AdempiereDatabase
 		{
 			e.printStackTrace();
 		}
-		}
-		m_ds = null;
 	}	//	close
 
 
@@ -1079,45 +941,6 @@ public class DB_PostgreSQL implements AdempiereDatabase
 		return true;
 	}
 
-	private int getIntProperty(Properties properties, String key, int defaultValue)
-	{
-		int i = defaultValue;
-		try
-		{
-			String s = properties.getProperty(key);
-			if (s != null && s.trim().length() > 0)
-				i = Integer.parseInt(s);
-		}
-		catch (Exception e) {}
-		return i;
-	}
-
-	private boolean getBooleanProperty(Properties properties, String key, boolean defaultValue)
-	{
-		boolean b = defaultValue;
-		try
-		{
-			String s = properties.getProperty(key);
-			if (s != null && s.trim().length() > 0)
-				b = Boolean.valueOf(s);
-		}
-		catch (Exception e) {}
-		return b;
-	}
-
-	private String getStringProperty(Properties properties, String key, String defaultValue)
-	{
-		String b = defaultValue;
-		try
-		{
-			String s = properties.getProperty(key);
-			if (s != null && s.trim().length() > 0)
-				b = s.trim();
-		}
-		catch (Exception e) {}
-		return b;
-	}
-
 	@Override
 	public boolean forUpdate(PO po, int timeout) {
 		//only can lock for update if using trx