| author | Rob Daugherty <rd472p@att.com> | 2018-11-05 10:23:04 -0500 |
|---|---|---|
| committer | Rob Daugherty <rd472p@att.com> | 2018-11-05 10:33:13 -0500 |
| commit | 95d22d7491ea365fdc4525e44d484c73c33e16a1 (patch) | |
| tree | a5fdb7de3f14b345db760463cec8cee130b5b116 /src/main | |
| parent | 8dca3aebe55502ab35402da6ec7123bd3de7694d (diff) | |
Modularized mdbc build
The mdbc pom should inherit from oparent.
The top-level pom should not build the jar directly.
It should be a parent pom for mdbc submodules.
The first submodule will be called mdbc-server.
Note: the "mdbc" jar will now be called "mdbc-server".
Change-Id: I4456e659b7494641e5b3cefd540eb62a149b79a4
Issue-ID: MUSIC-175
Signed-off-by: Rob Daugherty <rd472p@att.com>
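For context, a minimal sketch of the top-level pom structure this commit message describes: a packaging-`pom` parent that inherits from oparent and delegates the jar build to the new mdbc-server submodule. The coordinates, versions, and namespace below are illustrative assumptions, not taken from this change.

```xml
<!-- Illustrative sketch only; groupId/version values are assumptions. -->
<project xmlns="http://maven.apache.org/POM/4.0.0">
  <modelVersion>4.0.0</modelVersion>

  <!-- The mdbc pom inherits from oparent (version assumed). -->
  <parent>
    <groupId>org.onap.oparent</groupId>
    <artifactId>oparent</artifactId>
    <version>1.2.0</version>
  </parent>

  <groupId>org.onap.music.mdbc</groupId>
  <artifactId>mdbc</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <!-- Parent/aggregator only: the top-level pom no longer builds the jar directly. -->
  <packaging>pom</packaging>

  <modules>
    <!-- First submodule; it produces the jar formerly published as "mdbc". -->
    <module>mdbc-server</module>
  </modules>
</project>
```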
Diffstat (limited to 'src/main')
68 files changed, 0 insertions(+), 12194 deletions(-)
diff --git a/src/main/java/org/onap/music/exceptions/MDBCServiceException.java b/src/main/java/org/onap/music/exceptions/MDBCServiceException.java deleted file mode 100644 index 9be84e5..0000000 --- a/src/main/java/org/onap/music/exceptions/MDBCServiceException.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * ============LICENSE_START========================================== - * org.onap.music - * =================================================================== - * Copyright (c) 2017 AT&T Intellectual Property - * =================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * ============LICENSE_END============================================= - * ==================================================================== - */ - -package org.onap.music.exceptions; - -/** - * @author inam - * - */ -public class MDBCServiceException extends Exception { - - - /** - * - */ - private static final long serialVersionUID = 1L; - private int errorCode; - private String errorMessage; - - public int getErrorCode() { - return errorCode; - } - - - public void setErrorCode(int errorCode) { - this.errorCode = errorCode; - } - - - public String getErrorMessage() { - return errorMessage; - } - - - public void setErrorMessage(String errorMessage) { - this.errorMessage = errorMessage; - } - - - public MDBCServiceException() { - super(); - } - - - public MDBCServiceException(String message) { - super(message); - - } - - - public MDBCServiceException(Throwable cause) { - super(cause); - - } - - - public MDBCServiceException(String message, Throwable cause) { - super(message, cause); - - } - - - public MDBCServiceException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - - } - -} diff --git a/src/main/java/org/onap/music/exceptions/QueryException.java b/src/main/java/org/onap/music/exceptions/QueryException.java deleted file mode 100644 index 72a7cee..0000000 --- a/src/main/java/org/onap/music/exceptions/QueryException.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * ============LICENSE_START========================================== - * org.onap.music - * =================================================================== - * Copyright (c) 2017 AT&T Intellectual Property - * =================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * ============LICENSE_END============================================= - * ==================================================================== - */ -package org.onap.music.exceptions; - - - -/** - * @author inam - * - */ -public class QueryException extends Exception { - - /** - * - */ - private static final long serialVersionUID = 1L; - @SuppressWarnings("unused") - private int errorCode; - - - /** - * - */ - public QueryException() { - super(); - } - - /** - * @param message - */ - public QueryException(String message) { - super(message); - } - - - - /** - * @param message - */ - public QueryException(String message, int errorCode) { - super(message); - this.errorCode = errorCode; - } - - /** - * @param cause - */ - public QueryException(Throwable cause) { - super(cause); - } - - /** - * @param message - * @param cause - */ - public QueryException(String message, Throwable cause) { - super(message, cause); - } - - /** - * @param message - * @param cause - * @param enableSuppression - * @param writableStackTrace - */ - public QueryException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - -} diff --git a/src/main/java/org/onap/music/logging/EELFLoggerDelegate.java b/src/main/java/org/onap/music/logging/EELFLoggerDelegate.java deleted file mode 100644 index 16a70dd..0000000 --- a/src/main/java/org/onap/music/logging/EELFLoggerDelegate.java +++ /dev/null @@ -1,339 +0,0 @@ - -package org.onap.music.logging; - -import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN; -import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS; -import static com.att.eelf.configuration.Configuration.MDC_SERVICE_INSTANCE_ID; -import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME; - -import java.net.InetAddress; -import java.text.MessageFormat; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import javax.servlet.http.HttpServletRequest; - -import org.slf4j.MDC; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.eelf.configuration.SLF4jWrapper; - -public class EELFLoggerDelegate extends SLF4jWrapper implements EELFLogger { - - public static final EELFLogger errorLogger = EELFManager.getInstance().getErrorLogger(); - public static final EELFLogger applicationLogger = EELFManager.getInstance().getApplicationLogger(); - public static final EELFLogger auditLogger = EELFManager.getInstance().getAuditLogger(); - public static final EELFLogger metricsLogger = EELFManager.getInstance().getMetricsLogger(); - public static final EELFLogger debugLogger = EELFManager.getInstance().getDebugLogger(); - - private String className; - private static ConcurrentMap<String, EELFLoggerDelegate> classMap = new ConcurrentHashMap<>(); - - public EELFLoggerDelegate(final String className) { - super(className); - this.className = className; - } - - /** - * Convenience method that gets a logger for the specified class. - * - * @see #getLogger(String) - * - * @param clazz - * @return Instance of EELFLoggerDelegate - */ - public static EELFLoggerDelegate getLogger(Class<?> clazz) { - return getLogger(clazz.getName()); - } - - /** - * Gets a logger for the specified class name. If the logger does not already - * exist in the map, this creates a new logger. - * - * @param className - * If null or empty, uses EELFLoggerDelegate as the class name. 
- * @return Instance of EELFLoggerDelegate - */ - public static EELFLoggerDelegate getLogger(final String className) { - String classNameNeverNull = className == null || "".equals(className) ? EELFLoggerDelegate.class.getName() - : className; - EELFLoggerDelegate delegate = classMap.get(classNameNeverNull); - if (delegate == null) { - delegate = new EELFLoggerDelegate(className); - classMap.put(className, delegate); - } - return delegate; - } - - /** - * Logs a message at the lowest level: trace. - * - * @param logger - * @param msg - */ - public void trace(EELFLogger logger, String msg) { - if (logger.isTraceEnabled()) { - logger.trace(msg); - } - } - - /** - * Logs a message with parameters at the lowest level: trace. - * - * @param logger - * @param msg - * @param arguments - */ - public void trace(EELFLogger logger, String msg, Object... arguments) { - if (logger.isTraceEnabled()) { - logger.trace(msg, arguments); - } - } - - /** - * Logs a message and throwable at the lowest level: trace. - * - * @param logger - * @param msg - * @param th - */ - public void trace(EELFLogger logger, String msg, Throwable th) { - if (logger.isTraceEnabled()) { - logger.trace(msg, th); - } - } - - /** - * Logs a message at the second-lowest level: debug. - * - * @param logger - * @param msg - */ - public void debug(EELFLogger logger, String msg) { - if (logger.isDebugEnabled()) { - logger.debug(msg); - } - } - - /** - * Logs a message with parameters at the second-lowest level: debug. - * - * @param logger - * @param msg - * @param arguments - */ - public void debug(EELFLogger logger, String msg, Object... arguments) { - if (logger.isDebugEnabled()) { - logger.debug(msg, arguments); - } - } - - /** - * Logs a message and throwable at the second-lowest level: debug. - * - * @param logger - * @param msg - * @param th - */ - public void debug(EELFLogger logger, String msg, Throwable th) { - if (logger.isDebugEnabled()) { - logger.debug(msg, th); - } - } - - /** - * Logs a message at info level. - * - * @param logger - * @param msg - */ - public void info(EELFLogger logger, String msg) { - logger.info(className + " - "+msg); - } - - /** - * Logs a message with parameters at info level. - * - * @param logger - * @param msg - * @param arguments - */ - public void info(EELFLogger logger, String msg, Object... arguments) { - logger.info(msg, arguments); - } - - /** - * Logs a message and throwable at info level. - * - * @param logger - * @param msg - * @param th - */ - public void info(EELFLogger logger, String msg, Throwable th) { - logger.info(msg, th); - } - - /** - * Logs a message at warn level. - * - * @param logger - * @param msg - */ - public void warn(EELFLogger logger, String msg) { - logger.warn(msg); - } - - /** - * Logs a message with parameters at warn level. - * - * @param logger - * @param msg - * @param arguments - */ - public void warn(EELFLogger logger, String msg, Object... arguments) { - logger.warn(msg, arguments); - } - - /** - * Logs a message and throwable at warn level. - * - * @param logger - * @param msg - * @param th - */ - public void warn(EELFLogger logger, String msg, Throwable th) { - logger.warn(msg, th); - } - - /** - * Logs a message at error level. - * - * @param logger - * @param msg - */ - public void error(EELFLogger logger, String msg) { - logger.error(className+ " - " + msg); - } - - /** - * Logs a message with parameters at error level. - * - * @param logger - * @param msg - * @param arguments - */ - public void error(EELFLogger logger, String msg, Object... 
arguments) { - logger.error(msg, arguments); - } - - /** - * Logs a message and throwable at error level. - * - * @param logger - * @param msg - * @param th - */ - public void error(EELFLogger logger, String msg, Throwable th) { - logger.error(msg, th); - } - - /** - * Logs a message with the associated alarm severity at error level. - * - * @param logger - * @param msg - * @param severtiy - */ - public void error(EELFLogger logger, String msg, Object /*AlarmSeverityEnum*/ severtiy) { - logger.error(msg); - } - - /** - * Initializes the logger context. - */ - public void init() { - setGlobalLoggingContext(); - final String msg = "############################ Logging is started. ############################"; - // These loggers emit the current date-time without being told. - info(applicationLogger, msg); - error(errorLogger, msg); - debug(debugLogger, msg); - info(auditLogger, msg); - info(metricsLogger, msg); - } - - - /** - * Builds a message using a template string and the arguments. - * - * @param message - * @param args - * @return - */ - @SuppressWarnings("unused") - private String formatMessage(String message, Object... args) { - StringBuilder sbFormattedMessage = new StringBuilder(); - if (args != null && args.length > 0 && message != null && message != "") { - MessageFormat mf = new MessageFormat(message); - sbFormattedMessage.append(mf.format(args)); - } else { - sbFormattedMessage.append(message); - } - - return sbFormattedMessage.toString(); - } - - /** - * Loads all the default logging fields into the MDC context. - */ - private void setGlobalLoggingContext() { - MDC.put(MDC_SERVICE_INSTANCE_ID, ""); - try { - MDC.put(MDC_SERVER_FQDN, InetAddress.getLocalHost().getHostName()); - MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress()); - } catch (Exception e) { - errorLogger.error("setGlobalLoggingContext failed", e); - } - } - - public static void mdcPut(String key, String value) { - MDC.put(key, value); - } - - public static String mdcGet(String key) { - return MDC.get(key); - } - - public static void mdcRemove(String key) { - MDC.remove(key); - } - - /** - * Loads the RequestId/TransactionId into the MDC which it should be receiving - * with an each incoming REST API request. Also, configures few other request - * based logging fields into the MDC context. - * - * @param req - * @param appName - */ - public void setRequestBasedDefaultsIntoGlobalLoggingContext(HttpServletRequest req, String appName) { - // Load the default fields - setGlobalLoggingContext(); - - // Load the request based fields - if (req != null) { - - - // Rest Path - MDC.put(MDC_SERVICE_NAME, req.getServletPath()); - - // Client IPAddress i.e. IPAddress of the remote host who is making - // this request. 
- String clientIPAddress = req.getHeader("X-FORWARDED-FOR"); - if (clientIPAddress == null) { - clientIPAddress = req.getRemoteAddr(); - } - } - } -} diff --git a/src/main/java/org/onap/music/logging/format/AppMessages.java b/src/main/java/org/onap/music/logging/format/AppMessages.java deleted file mode 100644 index 304719c..0000000 --- a/src/main/java/org/onap/music/logging/format/AppMessages.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * ============LICENSE_START========================================== - * org.onap.music - * =================================================================== - * Copyright (c) 2017 AT&T Intellectual Property - * =================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * ============LICENSE_END============================================= - * ==================================================================== - */ - -package org.onap.music.logging.format; - -/** - * @author inam - * - */ -public enum AppMessages { - - - - /* - * 100-199 Security/Permission Related - Authentication problems - * [ERR100E] Missing Information - * [ERR101E] Authentication error occured - * - * 200-299 Availability/Timeout Related/IO - connectivity error - connection timeout - * [ERR200E] Connectivity - * [ERR201E] Host not available - * [ERR202E] Error while connecting - * [ERR203E] IO Error has occured - * [ERR204E] Execution Interrupted - * [ERR205E] Session Expired - * - * - * - * 300-399 Data Access/Integrity Related - * [ERR300E] Incorrect data - * - * 400-499 - Cassandra Query Related - * - * - * 500-599 - Zookeepr/Locking Related - - * - * - * 600 - 699 - MDBC Service Errors - * [ERR600E] Error initializing the MDBC - * - * 700-799 Schema Interface Type/Validation - received Pay-load checksum is - * invalid - received JSON is not valid - * - * 800-899 Business/Flow Processing Related - check out to service is not - * allowed - Roll-back is done - failed to generate heat file - * - * - * 900-999 Unknown Errors - Unexpected exception - * [ERR900E] Unexpected error occured - * [ERR901E] Number format exception - * - * - * 1000-1099 Reserved - do not use - * - */ - - - - - MISSINGINFO("[ERR100E]", "Missing Information ","Details: NA", "Please check application credentials and/or headers"), - AUTHENTICATIONERROR("[ERR101E]", "Authentication error occured ","Details: NA", "Please verify application credentials"), - - CONNCECTIVITYERROR("[ERR200E]"," Connectivity error","Details: NA ","Please check connectivity to external resources"), - HOSTUNAVAILABLE("[ERR201E]","Host not available","Details: NA","Please verify the host details"), - IOERROR("[ERR203E]","IO Error has occured","","Please check IO"), - EXECUTIONINTERRUPTED("[ERR204E]"," Execution Interrupted","",""), - - - INCORRECTDATA("[ERR300E]"," Incorrect data",""," Please verify the request payload and try again"), - MULTIPLERECORDS("[ERR301E]"," Multiple records found",""," Please verify the request payload and try again"), - 
ALREADYEXIST("[ERR302E]"," Record already exist",""," Please verify the request payload and try again"), - MISSINGDATA("[ERR300E]"," Incorrect data",""," Please verify the request payload and try again"), - - QUERYERROR("[ERR400E]","Error while processing query",""," Please verify the query"), - - - UNKNOWNERROR("[ERR900E]"," Unexpected error occured",""," Please check logs for details"); - - - - ErrorTypes eType; - ErrorSeverity alarmSeverity; - ErrorSeverity errorSeverity; - String errorCode; - String errorDescription; - String details; - String resolution; - - - AppMessages(String errorCode, String errorDescription, String details,String resolution) { - - this.errorCode = errorCode; - this.errorDescription = errorDescription; - this.details = details; - this.resolution = resolution; - } - - - - - AppMessages(ErrorTypes eType, ErrorSeverity alarmSeverity, - ErrorSeverity errorSeverity, String errorCode, String errorDescription, String details, - String resolution) { - - this.eType = eType; - this.alarmSeverity = alarmSeverity; - this.errorSeverity = errorSeverity; - this.errorCode = errorCode; - this.errorDescription = errorDescription; - this.details = details; - this.resolution = resolution; - } - - public String getDetails() { - return this.details; - } - - public String getResolution() { - return this.resolution; - } - - public String getErrorCode() { - return this.errorCode; - } - - public String getErrorDescription() { - return this.errorDescription; - } - - - - - - - -} diff --git a/src/main/java/org/onap/music/logging/format/ErrorSeverity.java b/src/main/java/org/onap/music/logging/format/ErrorSeverity.java deleted file mode 100644 index 49cc2f4..0000000 --- a/src/main/java/org/onap/music/logging/format/ErrorSeverity.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * ============LICENSE_START========================================== - * org.onap.music - * =================================================================== - * Copyright (c) 2017 AT&T Intellectual Property - * =================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * ============LICENSE_END============================================= - * ==================================================================== - */ -package org.onap.music.logging.format; - -/** - * @author inam - * - */ -public enum ErrorSeverity { - INFO, - WARN, - ERROR, - FATAL, - CRITICAL, - MAJOR, - MINOR, - NONE, -} diff --git a/src/main/java/org/onap/music/logging/format/ErrorTypes.java b/src/main/java/org/onap/music/logging/format/ErrorTypes.java deleted file mode 100644 index 89dd5f8..0000000 --- a/src/main/java/org/onap/music/logging/format/ErrorTypes.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * ============LICENSE_START========================================== - * org.onap.music - * =================================================================== - * Copyright (c) 2017 AT&T Intellectual Property - * =================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * ============LICENSE_END============================================= - * ==================================================================== - */ -package org.onap.music.logging.format; - -import com.att.eelf.i18n.EELFResolvableErrorEnum; - -/** - * @author inam - * - */ -public enum ErrorTypes implements EELFResolvableErrorEnum { - - - CONNECTIONERROR, - SESSIONEXPIRED, - AUTHENTICATIONERROR, - SERVICEUNAVAILABLE, - QUERYERROR, - DATAERROR, - GENERALSERVICEERROR, - MUSICSERVICEERROR, - LOCKINGERROR, - UNKNOWN, - -} diff --git a/src/main/java/org/onap/music/mdbc/ArchiveProcess.java b/src/main/java/org/onap/music/mdbc/ArchiveProcess.java deleted file mode 100644 index 26a1ef3..0000000 --- a/src/main/java/org/onap/music/mdbc/ArchiveProcess.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.onap.music.mdbc; - -import org.json.JSONObject; - -import org.onap.music.mdbc.mixins.DBInterface; -import org.onap.music.mdbc.mixins.MusicInterface; - -public class ArchiveProcess { - protected MusicInterface mi; - protected DBInterface dbi; - - //TODO: This is a place holder for taking snapshots and moving data from redo record into actual tables - - /** - * This method is called whenever there is a DELETE on the transaction digest and should be called when ownership changes, if required - * It updates the MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL DELETE. - * Music propagates it to the other replicas. - * @param tableName This is the table on which the select is being performed - * @param oldRow This is information about the row that is being deleted - */ - @SuppressWarnings("unused") - private void deleteFromEntityTableInMusic(String tableName, JSONObject oldRow) { - TableInfo ti = dbi.getTableInfo(tableName); - mi.deleteFromEntityTableInMusic(ti,tableName, oldRow); - } - - /** - * This method is called whenever there is an INSERT or UPDATE to a the transaction digest, and should be called by an - * ownership chance. It updates the MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. 
- * Music propagates it to the other replicas. If the local database is in the middle of a transaction, the updates to MUSIC are - * delayed until the transaction is either committed or rolled back. - * - * @param tableName This is the table that has changed. - * @param changedRow This is information about the row that has changed, an array of objects representing the data being inserted/updated - */ - @SuppressWarnings("unused") - private void updateDirtyRowAndEntityTableInMusic(String tableName, JSONObject changedRow) { - //TODO: is this right? should we be saving updates at the client? we should leverage JDBC to handle this - TableInfo ti = dbi.getTableInfo(tableName); - mi.updateDirtyRowAndEntityTableInMusic(ti,tableName, changedRow); - } -} diff --git a/src/main/java/org/onap/music/mdbc/Configuration.java b/src/main/java/org/onap/music/mdbc/Configuration.java deleted file mode 100644 index a4516dd..0000000 --- a/src/main/java/org/onap/music/mdbc/Configuration.java +++ /dev/null @@ -1,18 +0,0 @@ -package org.onap.music.mdbc; - -public class Configuration { - /** The property name to use to connect to cassandra*/ - public static final String KEY_CASSANDRA_URL = "CASSANDRA_URL"; - /** The property name to use to enable/disable the MusicSqlManager entirely. */ - public static final String KEY_DISABLED = "disabled"; - /** The property name to use to select the DB 'mixin'. */ - public static final String KEY_DB_MIXIN_NAME = "MDBC_DB_MIXIN"; - /** The property name to use to select the MUSIC 'mixin'. */ - public static final String KEY_MUSIC_MIXIN_NAME = "MDBC_MUSIC_MIXIN"; - /** The name of the default mixin to use for the DBInterface. */ - public static final String DB_MIXIN_DEFAULT = "mysql";//"h2"; - /** The name of the default mixin to use for the MusicInterface. 
*/ - public static final String MUSIC_MIXIN_DEFAULT = "cassandra2";//"cassandra2"; - /** Default cassandra ulr*/ - public static final String CASSANDRA_URL_DEFAULT = "localhost";//"cassandra2"; -} diff --git a/src/main/java/org/onap/music/mdbc/DatabaseOperations.java b/src/main/java/org/onap/music/mdbc/DatabaseOperations.java deleted file mode 100644 index c384199..0000000 --- a/src/main/java/org/onap/music/mdbc/DatabaseOperations.java +++ /dev/null @@ -1,316 +0,0 @@ -package org.onap.music.mdbc; - -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.TupleValue; -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.datastore.PreparedQueryObject; -import org.onap.music.exceptions.MusicLockingException; -import org.onap.music.exceptions.MusicQueryException; -import org.onap.music.exceptions.MusicServiceException; -import org.onap.music.main.MusicCore; -import org.onap.music.main.ResultType; -import org.onap.music.main.ReturnType; -import org.onap.music.mdbc.tables.MusicRangeInformationRow; -import org.onap.music.mdbc.tables.MusicTxDigestId; -import org.onap.music.mdbc.tables.PartitionInformation; -import org.onap.music.mdbc.tables.StagingTable; - -import java.io.IOException; -import java.util.*; - -import com.datastax.driver.core.utils.UUIDs; - -public class DatabaseOperations { - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(DatabaseOperations.class); - /** - * This functions is used to generate cassandra uuid - * @return a random UUID that can be used for fields of type uuid - */ - public static UUID generateUniqueKey() { - return UUIDs.random(); - } - - public static void createMusicTxDigest(String musicNamespace, String musicTxDigestTableName) - throws MDBCServiceException { - createMusicTxDigest(musicNamespace,musicTxDigestTableName,-1); - } - - /** - * This function creates the MusicTxDigest table. It contain information related to each transaction committed - * * LeaseId: id associated with the lease, text - * * LeaseCounter: transaction number under this lease, bigint \TODO this may need to be a varint later - * * TransactionDigest: text that contains all the changes in the transaction - */ - public static void createMusicTxDigest(String musicNamespace, String musicTxDigestTableName, - int musicTxDigestTableNumber) throws MDBCServiceException { - String tableName = musicTxDigestTableName; - if(musicTxDigestTableNumber >= 0) { - tableName = tableName + - "-" + - Integer.toString(musicTxDigestTableNumber); - } - String priKey = "txid"; - StringBuilder fields = new StringBuilder(); - fields.append("txid uuid, "); - fields.append("transactiondigest text ");//notice lack of ',' - String cql = String.format("CREATE TABLE IF NOT EXISTS %s.%s (%s, PRIMARY KEY (%s));", musicNamespace, tableName, fields, priKey); - try { - executeMusicWriteQuery(musicNamespace,tableName,cql); - } catch (MDBCServiceException e) { - logger.error("Initialization error: Failure to create redo records table"); - throw(e); - } - } - - /** - * This function creates the TransactionInformation table. It contain information related - * to the transactions happening in a given partition. - * * The schema of the table is - * * Id, uiid. 
- * * Partition, uuid id of the partition - * * LatestApplied, int indicates which values from the redologtable wast the last to be applied to the data tables - * * Applied: boolean, indicates if all the values in this redo log table where already applied to data tables - * * Redo: list of uiids associated to the Redo Records Table - * - */ - public static void createMusicRangeInformationTable(String musicNamespace, String musicRangeInformationTableName) throws MDBCServiceException { - String tableName = musicRangeInformationTableName; - String priKey = "rangeid"; - StringBuilder fields = new StringBuilder(); - fields.append("rangeid uuid, "); - fields.append("keys set<text>, "); - fields.append("ownerid text, "); - fields.append("metricprocessid text, "); - //TODO: Frozen is only needed for old versions of cassandra, please update correspondingly - fields.append("txredolog list<frozen<tuple<text,uuid>>> "); - String cql = String.format("CREATE TABLE IF NOT EXISTS %s.%s (%s, PRIMARY KEY (%s));", musicNamespace, tableName, fields, priKey); - try { - executeMusicWriteQuery(musicNamespace,tableName,cql); - } catch (MDBCServiceException e) { - logger.error("Initialization error: Failure to create transaction information table"); - throw(e); - } - } - - /** - * Creates a new empty tit row - * @param namespace namespace where the tit table is located - * @param mriTableName name of the corresponding mri table where the new row is added - * @param processId id of the process that is going to own initially this. - * @return uuid associated to the new row - */ - public static UUID createEmptyMriRow(String namespace, String mriTableName, - String processId, String lockId, List<Range> ranges) throws MDBCServiceException { - UUID id = generateUniqueKey(); - return createEmptyMriRow(namespace,mriTableName,id,processId,lockId,ranges); - } - - public static UUID createEmptyMriRow(String namespace, String mriTableName, UUID id, String processId, String lockId, - List<Range> ranges) throws MDBCServiceException{ - StringBuilder insert = new StringBuilder("INSERT INTO ") - .append(namespace) - .append('.') - .append(mriTableName) - .append(" (rangeid,keys,ownerid,metricprocessid,txredolog) VALUES ") - .append("(") - .append(id) - .append(",{"); - boolean first=true; - for(Range r: ranges){ - if(first){ first=false; } - else { - insert.append(','); - } - insert.append("'").append(r.toString()).append("'"); - } - insert.append("},'") - .append((lockId==null)?"":lockId) - .append("','") - .append(processId) - .append("',[]);"); - PreparedQueryObject query = new PreparedQueryObject(); - query.appendQueryString(insert.toString()); - try { - executeLockedPut(namespace,mriTableName,id.toString(),query,lockId,null); - } catch (MDBCServiceException e) { - logger.error("Initialization error: Failure to add new row to transaction information"); - throw new MDBCServiceException("Initialization error:Failure to add new row to transaction information"); - } - return id; - } - - public static MusicRangeInformationRow getMriRow(String namespace, String mriTableName, UUID id, String lockId) - throws MDBCServiceException{ - String cql = String.format("SELECT * FROM %s.%s WHERE rangeid = ?;", namespace, mriTableName); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(id); - Row newRow; - try { - newRow = executeLockedGet(namespace,mriTableName,pQueryObject,id.toString(),lockId); - } catch (MDBCServiceException e) { - logger.error("Get operationt 
error: Failure to get row from MRI "+mriTableName); - throw new MDBCServiceException("Initialization error:Failure to add new row to transaction information"); - } -// public MusicRangeInformationRow(UUID index, List<MusicTxDigestId> redoLog, PartitionInformation partition, - // String ownerId, String metricProcessId) { - List<TupleValue> log = newRow.getList("txredolog",TupleValue.class); - List<MusicTxDigestId> digestIds = new ArrayList<>(); - for(TupleValue t: log){ - //final String tableName = t.getString(0); - final UUID index = t.getUUID(1); - digestIds.add(new MusicTxDigestId(index)); - } - List<Range> partitions = new ArrayList<>(); - Set<String> tables = newRow.getSet("keys",String.class); - for (String table:tables){ - partitions.add(new Range(table)); - } - return new MusicRangeInformationRow(id,digestIds,new PartitionInformation(partitions),newRow.getString("ownerid"),newRow.getString("metricprocessid")); - - } - - public static HashMap<Range,StagingTable> getTransactionDigest(String namespace, String musicTxDigestTable, MusicTxDigestId id) - throws MDBCServiceException{ - String cql = String.format("SELECT * FROM %s.%s WHERE txid = ?;", namespace, musicTxDigestTable); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(id.tablePrimaryKey); - Row newRow; - try { - newRow = executeUnlockedQuorumGet(pQueryObject); - } catch (MDBCServiceException e) { - logger.error("Get operation error: Failure to get row from txdigesttable with id:"+id.tablePrimaryKey); - throw new MDBCServiceException("Initialization error:Failure to add new row to transaction information"); - } - String digest = newRow.getString("transactiondigest"); - HashMap<Range,StagingTable> changes; - try { - changes = (HashMap<Range, StagingTable>) MDBCUtils.fromString(digest); - } catch (IOException e) { - logger.error("IOException when deserializing digest failed with an invalid class for id:"+id.tablePrimaryKey); - throw new MDBCServiceException("Deserializng digest failed with ioexception"); - } catch (ClassNotFoundException e) { - logger.error("Deserializng digest failed with an invalid class for id:"+id.tablePrimaryKey); - throw new MDBCServiceException("Deserializng digest failed with an invalid class"); - } - return changes; - } - - /** - * This method executes a write query in Music - * @param cql the CQL to be sent to Cassandra - */ - protected static void executeMusicWriteQuery(String keyspace, String table, String cql) - throws MDBCServiceException { - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - ResultType rt = null; - try { - rt = MusicCore.createTable(keyspace,table,pQueryObject,"critical"); - } catch (MusicServiceException e) { - //\TODO: handle better, at least transform into an MDBCServiceException - e.printStackTrace(); - } - String result = rt.getResult(); - if (result==null || result.toLowerCase().equals("failure")) { - throw new MDBCServiceException("Music eventual put failed"); - } - } - - protected static Row executeLockedGet(String keyspace, String table, PreparedQueryObject cqlObject, String primaryKey, - String lock) - throws MDBCServiceException{ - ResultSet result; - try { - result = MusicCore.criticalGet(keyspace,table,primaryKey,cqlObject,lock); - } catch(MusicServiceException e){ - //\TODO: handle better, at least transform into an MDBCServiceException - e.printStackTrace(); - throw new MDBCServiceException("Error executing critical get"); - } - 
if(result.isExhausted()){ - throw new MDBCServiceException("There is not a row that matches the id "+primaryKey); - } - return result.one(); - } - - protected static Row executeUnlockedQuorumGet(PreparedQueryObject cqlObject) - throws MDBCServiceException{ - ResultSet result = MusicCore.quorumGet(cqlObject); - //\TODO: handle better, at least transform into an MDBCServiceException - if(result.isExhausted()){ - throw new MDBCServiceException("There is not a row that matches the query: ["+cqlObject.getQuery()+"]"); - } - return result.one(); - } - - protected static void executeLockedPut(String namespace, String tableName, - String primaryKeyWithoutDomain, PreparedQueryObject queryObject, String lockId, - MusicCore.Condition conditionInfo) throws MDBCServiceException { - ReturnType rt ; - if(lockId==null) { - try { - rt = MusicCore.atomicPut(namespace, tableName, primaryKeyWithoutDomain, queryObject, conditionInfo); - } catch (MusicLockingException e) { - logger.error("Music locked put failed"); - throw new MDBCServiceException("Music locked put failed"); - } catch (MusicServiceException e) { - logger.error("Music service fail: Music locked put failed"); - throw new MDBCServiceException("Music service fail: Music locked put failed"); - } catch (MusicQueryException e) { - logger.error("Music query fail: locked put failed"); - throw new MDBCServiceException("Music query fail: Music locked put failed"); - } - } - else { - rt = MusicCore.criticalPut(namespace, tableName, primaryKeyWithoutDomain, queryObject, lockId, conditionInfo); - } - if (rt.getResult().getResult().toLowerCase().equals("failure")) { - throw new MDBCServiceException("Music locked put failed"); - } - } - - public static void createNamespace(String namespace, int replicationFactor) throws MDBCServiceException { - Map<String,Object> replicationInfo = new HashMap<>(); - replicationInfo.put("'class'", "'SimpleStrategy'"); - replicationInfo.put("'replication_factor'", replicationFactor); - - PreparedQueryObject queryObject = new PreparedQueryObject(); - queryObject.appendQueryString( - "CREATE KEYSPACE " + namespace + " WITH REPLICATION = " + replicationInfo.toString().replaceAll("=", ":")); - - try { - MusicCore.nonKeyRelatedPut(queryObject, "critical"); - } catch (MusicServiceException e) { - if (!e.getMessage().equals("Keyspace "+namespace+" already exists")) { - logger.error("Error creating namespace: "+namespace); - throw new MDBCServiceException("Error creating namespace: "+namespace+". Internal error:"+e.getErrorMessage()); - } - } - } - - public static void createTxDigestRow(String namespace, String musicTxDigestTable, MusicTxDigestId newId, String transactionDigest) throws MDBCServiceException { - PreparedQueryObject query = new PreparedQueryObject(); - String cqlQuery = "INSERT INTO " + - namespace + - '.' 
+ - musicTxDigestTable + - " (txid,transactiondigest) " + - "VALUES (" + - newId.tablePrimaryKey + ",'" + - transactionDigest + - "');"; - query.appendQueryString(cqlQuery); - //\TODO check if I am not shooting on my own foot - try { - MusicCore.nonKeyRelatedPut(query,"critical"); - } catch (MusicServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Transaction Digest serialization was invalid for commit "+newId.tablePrimaryKey.toString()+ "with error "+e.getErrorMessage()); - throw new MDBCServiceException("Transaction Digest serialization for commit "+newId.tablePrimaryKey.toString()); - } - } - -} diff --git a/src/main/java/org/onap/music/mdbc/DatabasePartition.java b/src/main/java/org/onap/music/mdbc/DatabasePartition.java deleted file mode 100644 index 5d91dca..0000000 --- a/src/main/java/org/onap/music/mdbc/DatabasePartition.java +++ /dev/null @@ -1,171 +0,0 @@ -package org.onap.music.mdbc; - -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.util.*; - -import org.onap.music.logging.EELFLoggerDelegate; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.onap.music.mdbc.tables.MriReference; - -/** - * A database range contain information about what ranges should be hosted in the current MDBC instance - * A database range with an empty map, is supposed to contain all the tables in Music. - * @author Enrique Saurez - */ -public class DatabasePartition { - private transient static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(DatabasePartition.class); - - private String musicRangeInformationTable;//Table that currently contains the REDO log for this partition - private UUID musicRangeInformationIndex;//Index that can be obtained either from - private String musicTxDigestTable; - private String lockId; - protected List<Range> ranges; - - /** - * Each range represents a partition of the database, a database partition is a union of this partitions. - * The only requirement is that the ranges are not overlapping. 
- */ - - public DatabasePartition() { - ranges = new ArrayList<>(); - } - - public DatabasePartition(List<Range> knownRanges, UUID mriIndex, String mriTable, String lockId, String musicTxDigestTable) { - if(knownRanges != null) { - ranges = knownRanges; - } - else { - ranges = new ArrayList<>(); - } - - if(musicTxDigestTable != null) { - this.setMusicTxDigestTable(musicTxDigestTable); - } - else{ - this.setMusicTxDigestTable(""); - } - - if(mriIndex != null) { - this.setMusicRangeInformationIndex(mriIndex); - } - else { - this.setMusicRangeInformationIndex(null); - } - - if(mriTable != null) { - this.setMusicRangeInformationTable(mriTable); - } - else { - this.setMusicRangeInformationTable(""); - } - - if(lockId != null) { - this.setLockId(lockId); - } - else { - this.setLockId(""); - } - } - - public String getMusicRangeInformationTable() { - return musicRangeInformationTable; - } - - public void setMusicRangeInformationTable(String musicRangeInformationTable) { - this.musicRangeInformationTable = musicRangeInformationTable; - } - - public MriReference getMusicRangeInformationIndex() { - return new MriReference(musicRangeInformationTable,musicRangeInformationIndex); - } - - public void setMusicRangeInformationIndex(UUID musicRangeInformationIndex) { - this.musicRangeInformationIndex = musicRangeInformationIndex; - } - - /** - * Add a new range to the ones own by the local MDBC - * @param newRange range that is being added - * @throws IllegalArgumentException - */ - public synchronized void addNewRange(Range newRange) { - //Check overlap - for(Range r : ranges) { - if(r.overlaps(newRange)) { - throw new IllegalArgumentException("Range is already contain by a previous range"); - } - } - ranges.add(newRange); - } - - /** - * Delete a range that is being modified - * @param rangeToDel limits of the range - */ - public synchronized void deleteRange(Range rangeToDel) { - if(!ranges.contains(rangeToDel)) { - logger.error(EELFLoggerDelegate.errorLogger,"Range doesn't exist"); - throw new IllegalArgumentException("Invalid table"); - } - ranges.remove(rangeToDel); - } - - /** - * Get all the ranges that are currently owned - * @return ranges - */ - public synchronized Range[] getSnapshot() { - return (Range[]) ranges.toArray(); - } - - /** - * Serialize the ranges - * @return serialized ranges - */ - public String toJson() { - GsonBuilder builder = new GsonBuilder(); - builder.setPrettyPrinting().serializeNulls();; - Gson gson = builder.create(); - return gson.toJson(this); - } - - /** - * Function to obtain the configuration - * @param filepath path to the database range - * @return a new object of type DatabaseRange - * @throws FileNotFoundException - */ - - public static DatabasePartition readJsonFromFile( String filepath) throws FileNotFoundException { - BufferedReader br; - try { - br = new BufferedReader( - new FileReader(filepath)); - } catch (FileNotFoundException e) { - logger.error(EELFLoggerDelegate.errorLogger,"File was not found when reading json"+e); - throw e; - } - Gson gson = new Gson(); - DatabasePartition range = gson.fromJson(br, DatabasePartition.class); - return range; - } - - public String getLockId() { - return lockId; - } - - public void setLockId(String lockId) { - this.lockId = lockId; - } - - public String getMusicTxDigestTable() { - return musicTxDigestTable; - } - - public void setMusicTxDigestTable(String musicTxDigestTable) { - this.musicTxDigestTable = musicTxDigestTable; - } -} diff --git a/src/main/java/org/onap/music/mdbc/LockId.java 
b/src/main/java/org/onap/music/mdbc/LockId.java deleted file mode 100644 index 9401f26..0000000 --- a/src/main/java/org/onap/music/mdbc/LockId.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.onap.music.mdbc; - -public class LockId { - private String primaryKey; - private String domain; - private String lockReference; - - public LockId(String primaryKey, String domain, String lockReference){ - this.primaryKey = primaryKey; - this.domain = domain; - if(lockReference == null) { - this.lockReference = ""; - } - else{ - this.lockReference = lockReference; - } - } - - public String getFullyQualifiedLockKey(){ - return this.domain+"."+this.primaryKey; - } - - public String getPrimaryKey() { - return primaryKey; - } - - public void setPrimaryKey(String primaryKey) { - this.primaryKey = primaryKey; - } - - public String getDomain() { - return domain; - } - - public void setDomain(String domain) { - this.domain = domain; - } - - public String getLockReference() { - return lockReference; - } - - public void setLockReference(String lockReference) { - this.lockReference = lockReference; - } -} diff --git a/src/main/java/org/onap/music/mdbc/MDBCUtils.java b/src/main/java/org/onap/music/mdbc/MDBCUtils.java deleted file mode 100644 index 2e150bd..0000000 --- a/src/main/java/org/onap/music/mdbc/MDBCUtils.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.onap.music.mdbc; - -import java.io.*; -import java.util.Base64; -import java.util.Deque; -import java.util.HashMap; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.logging.format.AppMessages; -import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; -import org.onap.music.mdbc.tables.Operation; -import org.onap.music.mdbc.tables.StagingTable; - -import javassist.bytecode.Descriptor.Iterator; - -import org.apache.commons.lang3.tuple.Pair; -import org.json.JSONObject; - -public class MDBCUtils { - /** Write the object to a Base64 string. */ - public static String toString( Serializable o ) throws IOException { - //TODO We may want to also compress beside serialize - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try { - ObjectOutputStream oos = new ObjectOutputStream(baos); - oos.writeObject(o); - oos.close(); - return Base64.getEncoder().encodeToString(baos.toByteArray()); - } - finally{ - baos.close(); - } - } - - public static String toString( JSONObject o) throws IOException { - //TODO We may want to also compress beside serialize - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ObjectOutputStream oos = new ObjectOutputStream( baos ); - oos.writeObject( o ); - oos.close(); - return Base64.getEncoder().encodeToString(baos.toByteArray()); - } - - /** Read the object from Base64 string. 
*/ - public static Object fromString( String s ) throws IOException , - ClassNotFoundException { - byte [] data = Base64.getDecoder().decode( s ); - ObjectInputStream ois = new ObjectInputStream( - new ByteArrayInputStream( data ) ); - Object o = ois.readObject(); - ois.close(); - return o; - } - - public static void saveToFile(String serializedContent, String filename, EELFLoggerDelegate logger) throws IOException { - try (PrintWriter fout = new PrintWriter(filename)) { - fout.println(serializedContent); - } catch (FileNotFoundException e) { - if(logger!=null){ - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.IOERROR, ErrorTypes.UNKNOWN, ErrorSeverity.CRITICAL); - } - else { - e.printStackTrace(); - } - throw e; - } - } - -} diff --git a/src/main/java/org/onap/music/mdbc/MdbcCallableStatement.java b/src/main/java/org/onap/music/mdbc/MdbcCallableStatement.java deleted file mode 100644 index 95a49a8..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcCallableStatement.java +++ /dev/null @@ -1,738 +0,0 @@ -package org.onap.music.mdbc; - -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.CallableStatement; -import java.sql.Clob; -import java.sql.Date; -import java.sql.NClob; -import java.sql.ParameterMetaData; -import java.sql.Ref; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.Map; - -import org.onap.music.logging.EELFLoggerDelegate; - -/** - * ProxyStatement is a proxy Statement that front ends Statements from the underlying JDBC driver. It passes all operations through, - * and invokes the MusicSqlManager when there is the possibility that database tables have been created or dropped. 
- * - * @author Robert Eby - */ -public class MdbcCallableStatement extends MdbcPreparedStatement implements CallableStatement { - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MdbcCallableStatement.class); - @SuppressWarnings("unused") - private static final String DATASTAX_PREFIX = "com.datastax.driver"; - - public MdbcCallableStatement(Statement stmt, MusicSqlManager m) { - super(stmt, m); - } - - public MdbcCallableStatement(Statement stmt, String sql, MusicSqlManager mgr) { - super(stmt, sql, mgr); - } - - @Override - public <T> T unwrap(Class<T> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxystatement unwrap: " + iface.getName()); - return stmt.unwrap(iface); - } - - @Override - public boolean isWrapperFor(Class<?> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxystatement isWrapperFor: " + iface.getName()); - return stmt.isWrapperFor(iface); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterIndex, x, cal); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).setNull(parameterIndex, sqlType, typeName); - } - - @Override - public void setURL(int parameterIndex, URL x) throws SQLException { - ((CallableStatement)stmt).setURL(parameterIndex, x); - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return ((CallableStatement)stmt).getParameterMetaData(); - } - - @Override - public void setRowId(int parameterIndex, RowId x) throws SQLException { - ((CallableStatement)stmt).setRowId(parameterIndex, x); - } - - @Override - public void setNString(int parameterIndex, String value) throws SQLException { - ((CallableStatement)stmt).setNString(parameterIndex, value); - } - - @Override - public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value, length); - } - - @Override - public void setNClob(int parameterIndex, NClob value) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader, length); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream, length); - } - - @Override - public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader, length); - } - - @Override - public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - ((CallableStatement)stmt).setSQLXML(parameterIndex, xmlObject); - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - ((CallableStatement)stmt).setObject(parameterIndex, x, targetSqlType, scaleOrLength); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x, length); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { - 
((CallableStatement)stmt).setBinaryStream(parameterIndex, x, length); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader, length); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterIndex, x); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader); - } - - @Override - public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream); - } - - @Override - public void setNClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType, int scale) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType, scale); - } - - @Override - public boolean wasNull() throws SQLException { - return ((CallableStatement)stmt).wasNull(); - } - - @Override - public String getString(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getString(parameterIndex); - } - - @Override - public boolean getBoolean(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBoolean(parameterIndex); - } - - @Override - public byte getByte(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getByte(parameterIndex); - } - - @Override - public short getShort(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getShort(parameterIndex); - } - - @Override - public int getInt(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getInt(parameterIndex); - } - - @Override - public long getLong(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getLong(parameterIndex); - } - - @Override - public float getFloat(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getFloat(parameterIndex); - } - - @Override - public double getDouble(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getDouble(parameterIndex); - } - - @SuppressWarnings("deprecation") - @Override - public BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterIndex, scale); - } - - @Override - public byte[] getBytes(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBytes(parameterIndex); - } - - @Override - public Date getDate(int parameterIndex) throws SQLException 
{ - return ((CallableStatement)stmt).getDate(parameterIndex); - } - - @Override - public Time getTime(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterIndex); - } - - @Override - public Timestamp getTimestamp(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterIndex); - } - - @Override - public Object getObject(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex); - } - - @Override - public BigDecimal getBigDecimal(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterIndex); - } - - @Override - public Object getObject(int parameterIndex, Map<String, Class<?>> map) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex, map); - } - - @Override - public Ref getRef(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getRef(parameterIndex); - } - - @Override - public Blob getBlob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBlob(parameterIndex); - } - - @Override - public Clob getClob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getClob(parameterIndex); - } - - @Override - public Array getArray(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getArray(parameterIndex); - } - - @Override - public Date getDate(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterIndex, cal); - } - - @Override - public Time getTime(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterIndex, cal); - } - - @Override - public Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterIndex, cal); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType, typeName); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType, int scale) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType, scale); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType, typeName); - } - - @Override - public URL getURL(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getURL(parameterIndex); - } - - @Override - public void setURL(String parameterName, URL val) throws SQLException { - ((CallableStatement)stmt).setURL(parameterName, val); - } - - @Override - public void setNull(String parameterName, int sqlType) throws SQLException { - ((CallableStatement)stmt).setNull(parameterName, sqlType); - } - - @Override - public void setBoolean(String parameterName, boolean x) throws SQLException { - ((CallableStatement)stmt).setBoolean(parameterName, x); - } - - @Override - public void setByte(String parameterName, byte x) throws SQLException { - ((CallableStatement)stmt).setByte(parameterName, x); - } - - @Override - public void setShort(String parameterName, short x) throws 
SQLException { - ((CallableStatement)stmt).setShort(parameterName, x); - } - - @Override - public void setInt(String parameterName, int x) throws SQLException { - ((CallableStatement)stmt).setInt(parameterName, x); - } - - @Override - public void setLong(String parameterName, long x) throws SQLException { - ((CallableStatement)stmt).setLong(parameterName, x); - } - - @Override - public void setFloat(String parameterName, float x) throws SQLException { - ((CallableStatement)stmt).setFloat(parameterName, x); - } - - @Override - public void setDouble(String parameterName, double x) throws SQLException { - ((CallableStatement)stmt).setDouble(parameterName, x); - } - - @Override - public void setBigDecimal(String parameterName, BigDecimal x) throws SQLException { - ((CallableStatement)stmt).setBigDecimal(parameterName, x); - } - - @Override - public void setString(String parameterName, String x) throws SQLException { - ((CallableStatement)stmt).setString(parameterName, x); - } - - @Override - public void setBytes(String parameterName, byte[] x) throws SQLException { - ((CallableStatement)stmt).setBytes(parameterName, x); - } - - @Override - public void setDate(String parameterName, Date x) throws SQLException { - ((CallableStatement)stmt).setDate(parameterName, x); - } - - @Override - public void setTime(String parameterName, Time x) throws SQLException { - ((CallableStatement)stmt).setTime(parameterName, x); - } - - @Override - public void setTimestamp(String parameterName, Timestamp x) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterName, x); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x, int length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x, length); - } - - @Override - public void setBinaryStream(String parameterName, InputStream x, int length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x, length); - } - - @Override - public void setObject(String parameterName, Object x, int targetSqlType, int scale) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x, targetSqlType, scale); - } - - @Override - public void setObject(String parameterName, Object x, int targetSqlType) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x, targetSqlType); - } - - @Override - public void setObject(String parameterName, Object x) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader, int length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader, length); - } - - @Override - public void setDate(String parameterName, Date x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setDate(parameterName, x, cal); - } - - @Override - public void setTime(String parameterName, Time x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTime(parameterName, x, cal); - } - - @Override - public void setTimestamp(String parameterName, Timestamp x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterName, x, cal); - } - - @Override - public void setNull(String parameterName, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).setNull(parameterName, sqlType, typeName); - } - - @Override - public String getString(String parameterName) throws SQLException { - return 
((CallableStatement)stmt).getString(parameterName); - } - - @Override - public boolean getBoolean(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBoolean(parameterName); - } - - @Override - public byte getByte(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getByte(parameterName); - } - - @Override - public short getShort(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getShort(parameterName); - } - - @Override - public int getInt(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getInt(parameterName); - } - - @Override - public long getLong(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getLong(parameterName); - } - - @Override - public float getFloat(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getFloat(parameterName); - } - - @Override - public double getDouble(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getDouble(parameterName); - } - - @Override - public byte[] getBytes(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBytes(parameterName); - } - - @Override - public Date getDate(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterName); - } - - @Override - public Time getTime(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterName); - } - - @Override - public Timestamp getTimestamp(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterName); - } - - @Override - public Object getObject(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName); - } - - @Override - public BigDecimal getBigDecimal(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterName); - } - - @Override - public Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName, map); - } - - @Override - public Ref getRef(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getRef(parameterName); - } - - @Override - public Blob getBlob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBlob(parameterName); - } - - @Override - public Clob getClob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getClob(parameterName); - } - - @Override - public Array getArray(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getArray(parameterName); - } - - @Override - public Date getDate(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterName, cal); - } - - @Override - public Time getTime(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterName, cal); - } - - @Override - public Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterName, cal); - } - - @Override - public URL getURL(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getURL(parameterName); - } - - @Override - public RowId getRowId(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getRowId(parameterIndex); - } - - @Override - public RowId 
getRowId(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getRowId(parameterName); - } - - @Override - public void setRowId(String parameterName, RowId x) throws SQLException { - ((CallableStatement)stmt).setRowId(parameterName, x); - } - - @Override - public void setNString(String parameterName, String value) throws SQLException { - ((CallableStatement)stmt).setNString(parameterName, value); - } - - @Override - public void setNCharacterStream(String parameterName, Reader value, long length) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterName, value, length); - } - - @Override - public void setNClob(String parameterName, NClob value) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, value); - } - - @Override - public void setClob(String parameterName, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, reader, length); - } - - @Override - public void setBlob(String parameterName, InputStream inputStream, long length) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, inputStream, length); - } - - @Override - public void setNClob(String parameterName, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, reader, length); - } - - @Override - public NClob getNClob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNClob(parameterIndex); - } - - @Override - public NClob getNClob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNClob(parameterName); - } - - @Override - public void setSQLXML(String parameterName, SQLXML xmlObject) throws SQLException { - ((CallableStatement)stmt).setSQLXML(parameterName, xmlObject); - } - - @Override - public SQLXML getSQLXML(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getSQLXML(parameterIndex); - } - - @Override - public SQLXML getSQLXML(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getSQLXML(parameterName); - } - - @Override - public String getNString(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNString(parameterIndex); - } - - @Override - public String getNString(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNString(parameterName); - } - - @Override - public Reader getNCharacterStream(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNCharacterStream(parameterIndex); - } - - @Override - public Reader getNCharacterStream(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNCharacterStream(parameterName); - } - - @Override - public Reader getCharacterStream(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getCharacterStream(parameterIndex); - } - - @Override - public Reader getCharacterStream(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getCharacterStream(parameterName); - } - - @Override - public void setBlob(String parameterName, Blob x) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, x); - } - - @Override - public void setClob(String parameterName, Clob x) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, x); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x, length); 
- } - - @Override - public void setBinaryStream(String parameterName, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x, length); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader, length); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x); - } - - @Override - public void setBinaryStream(String parameterName, InputStream x) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader); - } - - @Override - public void setNCharacterStream(String parameterName, Reader value) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterName, value); - } - - @Override - public void setClob(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, reader); - } - - @Override - public void setBlob(String parameterName, InputStream inputStream) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, inputStream); - } - - @Override - public void setNClob(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, reader); - } - - @Override - public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex, type); - } - - @Override - public <T> T getObject(String parameterName, Class<T> type) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName, type); - } - -} diff --git a/src/main/java/org/onap/music/mdbc/MdbcConnection.java b/src/main/java/org/onap/music/mdbc/MdbcConnection.java deleted file mode 100644 index b553fb5..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcConnection.java +++ /dev/null @@ -1,419 +0,0 @@ -package org.onap.music.mdbc; - -import java.sql.Array; -import java.sql.Blob; -import java.sql.CallableStatement; -import java.sql.Clob; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.NClob; -import java.sql.PreparedStatement; -import java.sql.SQLClientInfoException; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.SQLXML; -import java.sql.Savepoint; -import java.sql.Statement; -import java.sql.Struct; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.Executor; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.exceptions.QueryException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.logging.format.AppMessages; -import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; -import org.onap.music.mdbc.mixins.MusicInterface; -import org.onap.music.mdbc.tables.TxCommitProgress; - - -/** - * ProxyConnection is a proxy to a JDBC driver Connection. It uses the MusicSqlManager to copy - * data to and from Cassandra and the underlying JDBC database as needed. It will notify the underlying - * MusicSqlManager of any calls to <code>commit(), rollback()</code> or <code>setAutoCommit()</code>. 
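The MdbcCallableStatement file deleted above is almost entirely one-line delegations: each JDBC method casts the wrapped statement to CallableStatement and forwards the call unchanged. A minimal sketch of that delegation pattern; the class name is illustrative and only the java.sql types are taken from the original:

// Sketch of the delegation style used throughout the deleted MdbcCallableStatement:
// the wrapper holds the driver's Statement and forwards every call after a cast.
import java.sql.CallableStatement;
import java.sql.SQLException;
import java.sql.Statement;

class DelegatingCallableStatementSketch {
    private final Statement stmt; // the 'real' underlying driver statement

    DelegatingCallableStatementSketch(Statement stmt) {
        this.stmt = stmt;
    }

    // Forwarded getter: cast once, delegate, return the driver's result unchanged.
    public String getString(int parameterIndex) throws SQLException {
        return ((CallableStatement) stmt).getString(parameterIndex);
    }

    // Forwarded setter: same shape, no MDBC-specific logic in the wrapper itself.
    public void setString(String parameterName, String x) throws SQLException {
        ((CallableStatement) stmt).setString(parameterName, x);
    }
}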
- * Otherwise it just forwards all requests to the underlying Connection of the 'real' database. - * - * @author Robert Eby - */ -public class MdbcConnection implements Connection { - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MdbcConnection.class); - - private final String id; // This is the transaction id, assigned to this connection. There is no need to change the id, if connection is reused - private final Connection conn; // the JDBC Connection to the actual underlying database - private final MusicSqlManager mgr; // there should be one MusicSqlManager in use per Connection - private final TxCommitProgress progressKeeper; - private final DatabasePartition partition; - - public MdbcConnection(String id, String url, Connection c, Properties info, MusicInterface mi, TxCommitProgress progressKeeper, DatabasePartition partition) throws MDBCServiceException { - this.id = id; - if (c == null) { - throw new MDBCServiceException("Connection is null"); - } - this.conn = c; - try { - this.mgr = new MusicSqlManager(url, c, info, mi); - } catch (MDBCServiceException e) { - logger.error("Failure in creating Music SQL Manager"); - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw e; - } - try { - this.mgr.setAutoCommit(c.getAutoCommit(),null,null,null); - } catch (SQLException e) { - logger.error("Failure in autocommit"); - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - } - - // Verify the tables in MUSIC match the tables in the database - // and create triggers on any tables that need them - //mgr.synchronizeTableData(); - if ( mgr != null ) try { - mgr.synchronizeTables(); - } catch (QueryException e) { - logger.error("Error syncrhonizing tables"); - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - } - else { - logger.error(EELFLoggerDelegate.errorLogger, "MusicSqlManager was not correctly created", AppMessages.UNKNOWNERROR, ErrorTypes.UNKNOWN, ErrorSeverity.FATAL); - throw new MDBCServiceException("Music SQL Manager object is null or invalid"); - } - this.progressKeeper = progressKeeper; - this.partition = partition; - logger.debug("Mdbc connection created with id: "+id); - } - - @Override - public <T> T unwrap(Class<T> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxyconn unwrap: " + iface.getName()); - return conn.unwrap(iface); - } - - @Override - public boolean isWrapperFor(Class<?> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxystatement iswrapperfor: " + iface.getName()); - return conn.isWrapperFor(iface); - } - - @Override - public Statement createStatement() throws SQLException { - return new MdbcCallableStatement(conn.createStatement(), mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql) throws SQLException { - //TODO: grab the sql call from here and all the other preparestatement calls - return new MdbcPreparedStatement(conn.prepareStatement(sql), sql, mgr); - } - - @Override - public CallableStatement prepareCall(String sql) throws SQLException { - return new MdbcCallableStatement(conn.prepareCall(sql), mgr); - } - - @Override - public String nativeSQL(String sql) throws SQLException { - return conn.nativeSQL(sql); - } - - @Override - public void setAutoCommit(boolean autoCommit) throws SQLException { - 
boolean b = conn.getAutoCommit(); - if (b != autoCommit) { - if(progressKeeper!=null) progressKeeper.commitRequested(id); - try { - mgr.setAutoCommit(autoCommit,id,progressKeeper,partition); - if(progressKeeper!=null) - progressKeeper.setMusicDone(id); - } catch (MDBCServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Commit to music failed", AppMessages.UNKNOWNERROR, ErrorTypes.UNKNOWN, ErrorSeverity.FATAL); - throw new SQLException("Failure commiting to MUSIC"); - } - conn.setAutoCommit(autoCommit); - if(progressKeeper!=null) { - progressKeeper.setSQLDone(id); - } - if(progressKeeper!=null&&progressKeeper.isComplete(id)){ - progressKeeper.reinitializeTxProgress(id); - } - } - } - - @Override - public boolean getAutoCommit() throws SQLException { - return conn.getAutoCommit(); - } - - @Override - public void commit() throws SQLException { - if(progressKeeper.isComplete(id)) { - return; - } - if(progressKeeper != null) { - progressKeeper.commitRequested(id); - } - - try { - mgr.commit(id,progressKeeper,partition); - } catch (MDBCServiceException e) { - //If the commit fail, then a new commitId should be used - logger.error(EELFLoggerDelegate.errorLogger, "Commit to music failed", AppMessages.UNKNOWNERROR, ErrorTypes.UNKNOWN, ErrorSeverity.FATAL); - throw new SQLException("Failure commiting to MUSIC"); - } - - if(progressKeeper != null) { - progressKeeper.setMusicDone(id); - } - - conn.commit(); - - if(progressKeeper != null) { - progressKeeper.setSQLDone(id); - } - //MusicMixin.releaseZKLocks(MusicMixin.currentLockMap.get(getConnID())); - if(progressKeeper.isComplete(id)){ - progressKeeper.reinitializeTxProgress(id); - } - } - - @Override - public void rollback() throws SQLException { - mgr.rollback(); - conn.rollback(); - progressKeeper.reinitializeTxProgress(id); - } - - @Override - public void close() throws SQLException { - logger.debug("Closing mdbc connection with id:"+id); - if (mgr != null) { - logger.debug("Closing mdbc manager with id:"+id); - mgr.close(); - } - if (conn != null && !conn.isClosed()) { - logger.debug("Closing jdbc from mdbc with id:"+id); - conn.close(); - logger.debug("Connection was closed for id:" + id); - } - } - - @Override - public boolean isClosed() throws SQLException { - return conn.isClosed(); - } - - @Override - public DatabaseMetaData getMetaData() throws SQLException { - return conn.getMetaData(); - } - - @Override - public void setReadOnly(boolean readOnly) throws SQLException { - conn.setReadOnly(readOnly); - } - - @Override - public boolean isReadOnly() throws SQLException { - return conn.isReadOnly(); - } - - @Override - public void setCatalog(String catalog) throws SQLException { - conn.setCatalog(catalog); - } - - @Override - public String getCatalog() throws SQLException { - return conn.getCatalog(); - } - - @Override - public void setTransactionIsolation(int level) throws SQLException { - conn.setTransactionIsolation(level); - } - - @Override - public int getTransactionIsolation() throws SQLException { - return conn.getTransactionIsolation(); - } - - @Override - public SQLWarning getWarnings() throws SQLException { - return conn.getWarnings(); - } - - @Override - public void clearWarnings() throws SQLException { - conn.clearWarnings(); - } - - @Override - public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - return new MdbcCallableStatement(conn.createStatement(resultSetType, resultSetConcurrency), mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql, 
int resultSetType, int resultSetConcurrency) - throws SQLException { - return new MdbcCallableStatement(conn.prepareStatement(sql, resultSetType, resultSetConcurrency), sql, mgr); - } - - @Override - public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return new MdbcCallableStatement(conn.prepareCall(sql, resultSetType, resultSetConcurrency), mgr); - } - - @Override - public Map<String, Class<?>> getTypeMap() throws SQLException { - return conn.getTypeMap(); - } - - @Override - public void setTypeMap(Map<String, Class<?>> map) throws SQLException { - conn.setTypeMap(map); - } - - @Override - public void setHoldability(int holdability) throws SQLException { - conn.setHoldability(holdability); - } - - @Override - public int getHoldability() throws SQLException { - return conn.getHoldability(); - } - - @Override - public Savepoint setSavepoint() throws SQLException { - return conn.setSavepoint(); - } - - @Override - public Savepoint setSavepoint(String name) throws SQLException { - return conn.setSavepoint(name); - } - - @Override - public void rollback(Savepoint savepoint) throws SQLException { - conn.rollback(savepoint); - } - - @Override - public void releaseSavepoint(Savepoint savepoint) throws SQLException { - conn.releaseSavepoint(savepoint); - } - - @Override - public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - return new MdbcCallableStatement(conn.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability), mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, - int resultSetHoldability) throws SQLException { - return new MdbcCallableStatement(conn.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability), sql, mgr); - } - - @Override - public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, - int resultSetHoldability) throws SQLException { - return new MdbcCallableStatement(conn.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability), mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { - return new MdbcPreparedStatement(conn.prepareStatement(sql, autoGeneratedKeys), sql, mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { - return new MdbcPreparedStatement(conn.prepareStatement(sql, columnIndexes), sql, mgr); - } - - @Override - public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { - return new MdbcPreparedStatement(conn.prepareStatement(sql, columnNames), sql, mgr); - } - - @Override - public Clob createClob() throws SQLException { - return conn.createClob(); - } - - @Override - public Blob createBlob() throws SQLException { - return conn.createBlob(); - } - - @Override - public NClob createNClob() throws SQLException { - return conn.createNClob(); - } - - @Override - public SQLXML createSQLXML() throws SQLException { - return conn.createSQLXML(); - } - - @Override - public boolean isValid(int timeout) throws SQLException { - return conn.isValid(timeout); - } - - @Override - public void setClientInfo(String name, String value) throws SQLClientInfoException { - conn.setClientInfo(name, value); - } - - @Override - public void setClientInfo(Properties properties) throws SQLClientInfoException { 
- conn.setClientInfo(properties); - } - - @Override - public String getClientInfo(String name) throws SQLException { - return conn.getClientInfo(name); - } - - @Override - public Properties getClientInfo() throws SQLException { - return conn.getClientInfo(); - } - - @Override - public Array createArrayOf(String typeName, Object[] elements) throws SQLException { - return conn.createArrayOf(typeName, elements); - } - - @Override - public Struct createStruct(String typeName, Object[] attributes) throws SQLException { - return conn.createStruct(typeName, attributes); - } - - @Override - public void setSchema(String schema) throws SQLException { - conn.setSchema(schema); - } - - @Override - public String getSchema() throws SQLException { - return conn.getSchema(); - } - - @Override - public void abort(Executor executor) throws SQLException { - conn.abort(executor); - } - - @Override - public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { - conn.setNetworkTimeout(executor, milliseconds); - } - - @Override - public int getNetworkTimeout() throws SQLException { - return conn.getNetworkTimeout(); - } -} diff --git a/src/main/java/org/onap/music/mdbc/MdbcPreparedStatement.java b/src/main/java/org/onap/music/mdbc/MdbcPreparedStatement.java deleted file mode 100644 index e781b4b..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcPreparedStatement.java +++ /dev/null @@ -1,743 +0,0 @@ -package org.onap.music.mdbc; - -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.CallableStatement; -import java.sql.Clob; -import java.sql.Connection; -import java.sql.Date; -import java.sql.NClob; -import java.sql.ParameterMetaData; -import java.sql.PreparedStatement; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; - -import org.apache.commons.lang3.StringUtils; - -import org.onap.music.logging.EELFLoggerDelegate; - -/** - * ProxyStatement is a proxy Statement that front ends Statements from the underlying JDBC driver. It passes all operations through, - * and invokes the MusicSqlManager when there is the possibility that database tables have been created or dropped. 
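The commit() override in the deleted MdbcConnection above coordinates two stores: staged writes go to MUSIC through MusicSqlManager first, then the underlying JDBC connection is committed, with TxCommitProgress marking each stage so a partial failure can be located. A simplified sketch of that ordering; ProgressKeeper and MusicManager here are illustrative stand-ins for the real MDBC types, not the actual interfaces:

// Sketch of the MUSIC-first, SQL-second commit ordering in the deleted MdbcConnection.commit().
import java.sql.Connection;
import java.sql.SQLException;

class CommitFlowSketch {
    interface ProgressKeeper {
        void commitRequested(String txId);
        void setMusicDone(String txId);
        void setSQLDone(String txId);
        boolean isComplete(String txId);
        void reinitializeTxProgress(String txId);
    }

    interface MusicManager {
        void commit(String txId) throws Exception; // push staged writes to MUSIC/Cassandra
    }

    void commit(String txId, MusicManager mgr, ProgressKeeper progress, Connection conn)
            throws SQLException {
        if (progress.isComplete(txId)) {
            return;                           // nothing outstanding for this transaction
        }
        progress.commitRequested(txId);
        try {
            mgr.commit(txId);                 // 1) commit staged changes to MUSIC first
        } catch (Exception e) {
            throw new SQLException("Failure committing to MUSIC", e);
        }
        progress.setMusicDone(txId);
        conn.commit();                        // 2) then commit the underlying SQL database
        progress.setSQLDone(txId);
        if (progress.isComplete(txId)) {
            progress.reinitializeTxProgress(txId); // reset tracking so the id can be reused
        }
    }
}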
- * - * @author Robert Eby - */ -public class MdbcPreparedStatement extends MdbcStatement implements PreparedStatement { - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MdbcPreparedStatement.class); - private static final String DATASTAX_PREFIX = "com.datastax.driver"; - - final String sql; // holds the sql statement if prepared statement - String[] params; // holds the parameters if prepared statement, indexing starts at 1 - - - public MdbcPreparedStatement(Statement stmt, MusicSqlManager m) { - super(stmt, m); - this.sql = null; - } - - public MdbcPreparedStatement(Statement stmt, String sql, MusicSqlManager mgr) { - super(stmt, sql, mgr); - this.sql = sql; - //indexing starts at 1 - params = new String[StringUtils.countMatches(sql, "?")+1]; - } - - @Override - public <T> T unwrap(Class<T> iface) throws SQLException { - return stmt.unwrap(iface); - } - - @Override - public boolean isWrapperFor(Class<?> iface) throws SQLException { - return stmt.isWrapperFor(iface); - } - - @Override - public ResultSet executeQuery(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeQuery: "+sql); - ResultSet r = null; - try { - mgr.preStatementHook(sql); - r = stmt.executeQuery(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "executeQuery: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return r; - } - - @Override - public int executeUpdate(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "executeUpdate: exception "+nm+" "+e); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public void close() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"Statement close: "); - stmt.close(); - } - - @Override - public int getMaxFieldSize() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"getMaxFieldSize"); - return stmt.getMaxFieldSize(); - } - - @Override - public void setMaxFieldSize(int max) throws SQLException { - stmt.setMaxFieldSize(max); - } - - @Override - public int getMaxRows() throws SQLException { - return stmt.getMaxRows(); - } - - @Override - public void setMaxRows(int max) throws SQLException { - stmt.setMaxRows(max); - } - - @Override - public void setEscapeProcessing(boolean enable) throws SQLException { - stmt.setEscapeProcessing(enable); - } - - @Override - public int getQueryTimeout() throws SQLException { - return stmt.getQueryTimeout(); - } - - @Override - public void setQueryTimeout(int seconds) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"setQueryTimeout seconds "+ seconds); - stmt.setQueryTimeout(seconds); - } - - @Override - public void cancel() throws SQLException { - stmt.cancel(); - } - - @Override - public SQLWarning getWarnings() throws SQLException { - return stmt.getWarnings(); - } - - @Override - public void clearWarnings() throws SQLException { - stmt.clearWarnings(); - } - - @Override - public void setCursorName(String name) throws SQLException { - stmt.setCursorName(name); - } - - @Override - public boolean execute(String sql) throws SQLException { - 
logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "execute: exception "+nm+" "+e); - // Note: this seems to be the only call Camunda uses, so it is the only one I am fixing for now. - boolean ignore = nm.startsWith(DATASTAX_PREFIX); -// ignore |= (nm.startsWith("org.h2.jdbc.JdbcSQLException") && e.getMessage().contains("already exists")); - if (ignore) { - logger.warn("execute: exception (IGNORED) "+nm); - } else { - logger.error(EELFLoggerDelegate.errorLogger, " Exception "+nm+" "+e); - throw e; - } - } - return b; - } - - @Override - public ResultSet getResultSet() throws SQLException { - return stmt.getResultSet(); - } - - @Override - public int getUpdateCount() throws SQLException { - return stmt.getUpdateCount(); - } - - @Override - public boolean getMoreResults() throws SQLException { - return stmt.getMoreResults(); - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - stmt.setFetchDirection(direction); - } - - @Override - public int getFetchDirection() throws SQLException { - return stmt.getFetchDirection(); - } - - @Override - public void setFetchSize(int rows) throws SQLException { - stmt.setFetchSize(rows); - } - - @Override - public int getFetchSize() throws SQLException { - return stmt.getFetchSize(); - } - - @Override - public int getResultSetConcurrency() throws SQLException { - return stmt.getResultSetConcurrency(); - } - - @Override - public int getResultSetType() throws SQLException { - return stmt.getResultSetType(); - } - - @Override - public void addBatch(String sql) throws SQLException { - stmt.addBatch(sql); - } - - @Override - public void clearBatch() throws SQLException { - stmt.clearBatch(); - } - - @Override - public int[] executeBatch() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeBatch: "); - int[] n = null; - try { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeBatch() is not supported by MDBC; your results may be incorrect as a result."); - n = stmt.executeBatch(); - synchronizeTables(null); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeBatch: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public Connection getConnection() throws SQLException { - return stmt.getConnection(); - } - - @Override - public boolean getMoreResults(int current) throws SQLException { - return stmt.getMoreResults(current); - } - - @Override - public ResultSet getGeneratedKeys() throws SQLException { - return stmt.getGeneratedKeys(); - } - - @Override - public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - 
logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, int[] columnIndexes) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, String[] columnNames) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public int getResultSetHoldability() throws SQLException { - return stmt.getResultSetHoldability(); - } - - @Override - public boolean isClosed() throws SQLException { - return stmt.isClosed(); - } - - @Override - public void setPoolable(boolean poolable) throws SQLException { - stmt.setPoolable(poolable); - } - - @Override - public boolean isPoolable() throws SQLException { - return stmt.isPoolable(); - } - - @Override - public void closeOnCompletion() throws SQLException { - stmt.closeOnCompletion(); - } - - @Override - public boolean isCloseOnCompletion() throws SQLException { - return stmt.isCloseOnCompletion(); - } - - @Override - public ResultSet executeQuery() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeQuery: "+sql); - ResultSet r = null; - try { - mgr.preStatementHook(sql); - r = ((PreparedStatement)stmt).executeQuery();; - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - 
e.printStackTrace(); - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeQuery: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return r; - } - - @Override - public int executeUpdate() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = ((PreparedStatement)stmt).executeUpdate(); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - e.printStackTrace(); - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public void setNull(int parameterIndex, int sqlType) throws SQLException { - ((PreparedStatement)stmt).setNull(parameterIndex, sqlType); - } - - @Override - public void setBoolean(int parameterIndex, boolean x) throws SQLException { - ((PreparedStatement)stmt).setBoolean(parameterIndex, x); - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - ((PreparedStatement)stmt).setByte(parameterIndex, x); - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - ((PreparedStatement)stmt).setShort(parameterIndex, x); - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - ((PreparedStatement)stmt).setInt(parameterIndex, x); - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - ((PreparedStatement)stmt).setLong(parameterIndex, x); - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - ((PreparedStatement)stmt).setFloat(parameterIndex, x); - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - ((PreparedStatement)stmt).setDouble(parameterIndex, x); - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - ((PreparedStatement)stmt).setBigDecimal(parameterIndex, x); - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - ((PreparedStatement)stmt).setString(parameterIndex, x); - params[parameterIndex] = x; - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - ((PreparedStatement)stmt).setBytes(parameterIndex, x); - } - - @Override - public void setDate(int parameterIndex, Date x) throws SQLException { - ((PreparedStatement)stmt).setDate(parameterIndex, x); - } - - @Override - public void setTime(int parameterIndex, Time x) throws SQLException { - ((PreparedStatement)stmt).setTime(parameterIndex, x); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - ((PreparedStatement)stmt).setTimestamp(parameterIndex, x); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setAsciiStream(parameterIndex, x, length); - } - - @SuppressWarnings("deprecation") - @Override - public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setUnicodeStream(parameterIndex, x, length); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setBinaryStream(parameterIndex, x, length); - } - - @Override - public void clearParameters() throws SQLException { 
- ((PreparedStatement)stmt).clearParameters(); - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { - ((PreparedStatement)stmt).setObject(parameterIndex, x, targetSqlType); - } - - @Override - public void setObject(int parameterIndex, Object x) throws SQLException { - ((PreparedStatement)stmt).setObject(parameterIndex, x); - } - - @Override - public boolean execute() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = ((PreparedStatement)stmt).execute(); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - e.printStackTrace(); - String nm = e.getClass().getName(); - // Note: this seems to be the only call Camunda uses, so it is the only one I am fixing for now. - boolean ignore = nm.startsWith(DATASTAX_PREFIX); -// ignore |= (nm.startsWith("org.h2.jdbc.JdbcSQLException") && e.getMessage().contains("already exists")); - if (ignore) { - logger.warn("execute: exception (IGNORED) "+nm); - } else { - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - throw e; - } - } - return b; - } - - @Override - public void addBatch() throws SQLException { - ((PreparedStatement)stmt).addBatch(); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { - ((PreparedStatement)stmt).setCharacterStream(parameterIndex, reader, length); - } - - @Override - public void setRef(int parameterIndex, Ref x) throws SQLException { - ((PreparedStatement)stmt).setRef(parameterIndex, x); - } - - @Override - public void setBlob(int parameterIndex, Blob x) throws SQLException { - ((PreparedStatement)stmt).setBlob(parameterIndex, x); - } - - @Override - public void setClob(int parameterIndex, Clob x) throws SQLException { - ((PreparedStatement)stmt).setClob(parameterIndex, x); - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - ((PreparedStatement)stmt).setArray(parameterIndex, x); - } - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - return ((PreparedStatement)stmt).getMetaData(); - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - ((PreparedStatement)stmt).setDate(parameterIndex, x, cal); - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - ((PreparedStatement)stmt).setTime(parameterIndex, x, cal); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterIndex, x, cal); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).setNull(parameterIndex, sqlType, typeName); - } - - @Override - public void setURL(int parameterIndex, URL x) throws SQLException { - ((CallableStatement)stmt).setURL(parameterIndex, x); - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return ((CallableStatement)stmt).getParameterMetaData(); - } - - @Override - public void setRowId(int parameterIndex, RowId x) throws SQLException { - ((CallableStatement)stmt).setRowId(parameterIndex, x); - } - - @Override - public void setNString(int parameterIndex, String value) throws SQLException { - ((CallableStatement)stmt).setNString(parameterIndex, value); - } - - @Override - public void 
setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value, length); - } - - @Override - public void setNClob(int parameterIndex, NClob value) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader, length); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream, length); - } - - @Override - public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader, length); - } - - @Override - public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - ((CallableStatement)stmt).setSQLXML(parameterIndex, xmlObject); - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - ((CallableStatement)stmt).setObject(parameterIndex, x, targetSqlType, scaleOrLength); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x, length); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterIndex, x, length); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader, length); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterIndex, x); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader); - } - - @Override - public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream); - } - - @Override - public void setNClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader); - } - -} diff --git a/src/main/java/org/onap/music/mdbc/MdbcServer.java b/src/main/java/org/onap/music/mdbc/MdbcServer.java deleted file mode 100644 index 4f83a54..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcServer.java +++ /dev/null @@ -1,162 +0,0 @@ -package org.onap.music.mdbc; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
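The executeQuery/executeUpdate/execute overrides in the deleted MdbcPreparedStatement above all share one shape: call MusicSqlManager's preStatementHook before the SQL runs on the real driver, postStatementHook afterwards, then resynchronize table metadata, and rethrow anything that is not a DataStax driver exception. A condensed sketch of that wrapping; SqlHooks is an illustrative stand-in for MusicSqlManager:

// Sketch of the pre/post statement hook pattern used by the deleted execute paths.
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class StatementHookSketch {
    private static final String DATASTAX_PREFIX = "com.datastax.driver";

    interface SqlHooks {
        void preStatementHook(String sql);
        void postStatementHook(String sql);
        void synchronizeTables(String sql) throws SQLException;
    }

    ResultSet executeQuery(Statement stmt, SqlHooks mgr, String sql) throws SQLException {
        ResultSet r = null;
        try {
            mgr.preStatementHook(sql);     // let MDBC see the SQL before it runs
            r = stmt.executeQuery(sql);    // run it on the real driver
            mgr.postStatementHook(sql);    // propagate the result to MUSIC
            mgr.synchronizeTables(sql);    // pick up any created or dropped tables
        } catch (Exception e) {
            if (!e.getClass().getName().startsWith(DATASTAX_PREFIX)) {
                throw (e instanceof SQLException) ? (SQLException) e : new SQLException(e);
            }
            // DataStax-originated exceptions are deliberately swallowed, as in the original.
        }
        return r;
    }
}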
- * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.onap.music.mdbc.configurations.NodeConfiguration; -import org.apache.calcite.avatica.remote.Driver.Serialization; -import org.apache.calcite.avatica.remote.LocalService; -import org.apache.calcite.avatica.server.HttpServer; -import org.apache.calcite.avatica.util.Unsafe; - -import org.onap.music.logging.EELFLoggerDelegate; -import com.beust.jcommander.IStringConverter; -import com.beust.jcommander.JCommander; -import com.beust.jcommander.Parameter; - -import java.util.Locale; -import java.util.Properties; - -public class MdbcServer { - public static final EELFLoggerDelegate LOG = EELFLoggerDelegate.getLogger(MdbcStatement.class); - - @Parameter(names = { "-c", "--configuration" }, required = true, - description = "This is the file that contains the ranges that are assigned to this MDBC server") - private String configurationFile; - - @Parameter(names = { "-u", "--url" }, required = true, - description = "JDBC driver url for the server") - private String url; - - @Parameter(names = { "-p", "--port" }, required = true, - description = "Port the server should bind") - private int port; - - @Parameter(names = { "-s", "--user" }, required = true, - description = "Mysql usr") - private String user; - - @Parameter(names = { "-a", "--pass" }, required = true, - description = "Mysql password") - private String password; - - final private Serialization serialization = Serialization.PROTOBUF; - - @Parameter(names = { "-h", "-help", "--help" }, help = true, - description = "Print the help message") - private boolean help = false; - - private NodeConfiguration config; - private HttpServer server; - - public void start() { - if (null != server) { - LOG.error("The server was already started"); - Unsafe.systemExit(ExitCodes.ALREADY_STARTED.ordinal()); - return; - } - - try { - config = NodeConfiguration.readJsonFromFile(configurationFile); - //\TODO Add configuration file with Server Info - Properties connectionProps = new Properties(); - connectionProps.put("user", user); - connectionProps.put("password", password); - MdbcServerLogic meta = new MdbcServerLogic(url,connectionProps,config); - LocalService service = new LocalService(meta); - - // Construct the server - this.server = new HttpServer.Builder<>() - .withHandler(service, serialization) - .withPort(port) - .build(); - - // Then start it - server.start(); - - LOG.info("Started Avatica server on port {} with serialization {}", server.getPort(), - serialization); - } catch (Exception e) { - LOG.error("Failed to start Avatica server", e); - Unsafe.systemExit(ExitCodes.START_FAILED.ordinal()); - } - } - - public void stop() { - if (null != server) { - server.stop(); - server = null; - } - } - - public void join() throws InterruptedException { - server.join(); - } - - public static void main(String[] args) { - final MdbcServer server = new MdbcServer(); - @SuppressWarnings("deprecation") - JCommander jc = new JCommander(server, args); - if (server.help) { - 
jc.usage(); - Unsafe.systemExit(ExitCodes.USAGE.ordinal()); - return; - } - - server.start(); - - // Try to clean up when the server is stopped. - Runtime.getRuntime().addShutdownHook( - new Thread(new Runnable() { - @Override public void run() { - LOG.info("Stopping server"); - server.stop(); - LOG.info("Server stopped"); - } - })); - - try { - server.join(); - } catch (InterruptedException e) { - // Reset interruption - Thread.currentThread().interrupt(); - // And exit now. - return; - } - } - - /** - * Converter from String to Serialization. Must be public for JCommander. - */ - public static class SerializationConverter implements IStringConverter<Serialization> { - @Override public Serialization convert(String value) { - return Serialization.valueOf(value.toUpperCase(Locale.ROOT)); - } - } - - /** - * Codes for exit conditions - */ - private enum ExitCodes { - NORMAL, - ALREADY_STARTED, // 1 - START_FAILED, // 2 - USAGE; // 3 - } -} - -// End StandaloneServer.java diff --git a/src/main/java/org/onap/music/mdbc/MdbcServerLogic.java b/src/main/java/org/onap/music/mdbc/MdbcServerLogic.java deleted file mode 100644 index a1984c2..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcServerLogic.java +++ /dev/null @@ -1,312 +0,0 @@ -package org.onap.music.mdbc; - -import java.sql.Connection; -import java.sql.SQLException; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeUnit; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.mdbc.configurations.NodeConfiguration; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.RemovalListener; -import com.google.common.cache.RemovalNotification; -import org.apache.calcite.avatica.MissingResultsException; -import org.apache.calcite.avatica.NoSuchStatementException; -import org.apache.calcite.avatica.jdbc.JdbcMeta; -import org.apache.calcite.avatica.remote.TypedValue; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.logging.format.AppMessages; -import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; - -public class MdbcServerLogic extends JdbcMeta{ - - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MdbcServerLogic.class); - - StateManager manager; - DatabasePartition ranges; - String name; - String sqlDatabase; - - //TODO: Delete this properties after debugging - private final Properties info; - private final Cache<String, Connection> connectionCache; - - public MdbcServerLogic(String Url, Properties info,NodeConfiguration config) throws SQLException, MDBCServiceException { - super(Url,info); - this.ranges = config.partition; - this.name = config.nodeName; - this.sqlDatabase = config.sqlDatabaseName; - this.manager = new StateManager(Url,info,this.ranges,this.sqlDatabase); - this.info = info; - int concurrencyLevel = Integer.parseInt( - info.getProperty(ConnectionCacheSettings.CONCURRENCY_LEVEL.key(), - ConnectionCacheSettings.CONCURRENCY_LEVEL.defaultValue())); - int initialCapacity = Integer.parseInt( - info.getProperty(ConnectionCacheSettings.INITIAL_CAPACITY.key(), - ConnectionCacheSettings.INITIAL_CAPACITY.defaultValue())); - long maxCapacity = Long.parseLong( - info.getProperty(ConnectionCacheSettings.MAX_CAPACITY.key(), - ConnectionCacheSettings.MAX_CAPACITY.defaultValue())); - long connectionExpiryDuration = Long.parseLong( - 
info.getProperty(ConnectionCacheSettings.EXPIRY_DURATION.key(), - ConnectionCacheSettings.EXPIRY_DURATION.defaultValue())); - TimeUnit connectionExpiryUnit = TimeUnit.valueOf( - info.getProperty(ConnectionCacheSettings.EXPIRY_UNIT.key(), - ConnectionCacheSettings.EXPIRY_UNIT.defaultValue())); - this.connectionCache = CacheBuilder.newBuilder() - .concurrencyLevel(concurrencyLevel) - .initialCapacity(initialCapacity) - .maximumSize(maxCapacity) - .expireAfterAccess(connectionExpiryDuration, connectionExpiryUnit) - .removalListener(new ConnectionExpiryHandler()) - .build(); - } - - @Override - protected Connection getConnection(String id) throws SQLException { - if (id == null) { - throw new NullPointerException("Connection id is null"); - } - //\TODO: don't use connectionCache, use this.manager internal state - Connection conn = connectionCache.getIfPresent(id); - if (conn == null) { - this.manager.CloseConnection(id); - logger.error(EELFLoggerDelegate.errorLogger,"Connection not found: invalid id, closed, or expired: " - + id); - throw new RuntimeException(" Connection not found: invalid id, closed, or expired: " + id); - } - return conn; - } - - @Override - public void openConnection(ConnectionHandle ch, Map<String, String> information) { - Properties fullInfo = new Properties(); - fullInfo.putAll(this.info); - if (information != null) { - fullInfo.putAll(information); - } - - final ConcurrentMap<String, Connection> cacheAsMap = this.connectionCache.asMap(); - if (cacheAsMap.containsKey(ch.id)) { - throw new RuntimeException("Connection already exists: " + ch.id); - } - // Avoid global synchronization of connection opening - try { - this.manager.OpenConnection(ch.id, info); - Connection conn = this.manager.GetConnection(ch.id); - if(conn == null) { - logger.error(EELFLoggerDelegate.errorLogger, "Connection created was null"); - throw new RuntimeException("Connection created was null for connection: " + ch.id); - } - Connection loadedConn = cacheAsMap.putIfAbsent(ch.id, conn); - logger.info("connection created with id {}", ch.id); - // Race condition: someone beat us to storing the connection in the cache. 
- if (loadedConn != null) { - //\TODO check if we added an additional race condition for this - this.manager.CloseConnection(ch.id); - conn.close(); - throw new RuntimeException("Connection already exists: " + ch.id); - } - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw new RuntimeException(e); - } - } - - @Override - public void closeConnection(ConnectionHandle ch) { - //\TODO use state connection instead - Connection conn = connectionCache.getIfPresent(ch.id); - if (conn == null) { - logger.debug("client requested close unknown connection {}", ch); - return; - } - logger.trace("closing connection {}", ch); - try { - conn.close(); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw new RuntimeException(e.getMessage()); - } finally { - connectionCache.invalidate(ch.id); - this.manager.CloseConnection(ch.id); - logger.info("connection closed with id {}", ch.id); - } - } - - @Override - public void commit(ConnectionHandle ch) { - try { - super.commit(ch); - logger.debug("connection commited with id {}", ch.id); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - } - - //\TODO All the following functions can be deleted - // Added for two reasons: debugging and logging - @Override - public StatementHandle prepare(ConnectionHandle ch, String sql, long maxRowCount) { - StatementHandle h; - try { - h = super.prepare(ch, sql, maxRowCount); - logger.debug("prepared statement {}", h); - } catch (Exception e ) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(e); - } - return h; - } - - @Override - public ExecuteResult prepareAndExecute(StatementHandle h, String sql, long maxRowCount, int maxRowsInFirstFrame, - PrepareCallback callback) throws NoSuchStatementException { - ExecuteResult e; - try { - e = super.prepareAndExecute(h, sql, maxRowCount,maxRowsInFirstFrame,callback); - logger.debug("prepare and execute statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return e; - } - - @Override - public ExecuteBatchResult prepareAndExecuteBatch(StatementHandle h, List<String> sqlCommands) - throws NoSuchStatementException { - ExecuteBatchResult e; - try { - e = super.prepareAndExecuteBatch(h, sqlCommands); - logger.debug("prepare and execute batch statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return e; - } - - @Override - public ExecuteBatchResult executeBatch(StatementHandle h, List<List<TypedValue>> parameterValues) - throws NoSuchStatementException { - ExecuteBatchResult e; - try { - e = super.executeBatch(h, parameterValues); - logger.debug("execute batch statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return e; - } - - @Override - public Frame fetch(StatementHandle h, long 
offset, int fetchMaxRowCount) - throws NoSuchStatementException, MissingResultsException { - Frame f; - try { - f = super.fetch(h, offset, fetchMaxRowCount); - logger.debug("fetch statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return f; - } - - @Override - public ExecuteResult execute(StatementHandle h, List<TypedValue> parameterValues, long maxRowCount) - throws NoSuchStatementException { - ExecuteResult e; - try { - e = super.execute(h, parameterValues, maxRowCount); - logger.debug("fetch statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return e; - } - - @Override - public ExecuteResult execute(StatementHandle h, List<TypedValue> parameterValues, int maxRowsInFirstFrame) - throws NoSuchStatementException { - ExecuteResult e; - try { - e = super.execute(h, parameterValues, maxRowsInFirstFrame); - logger.debug("fetch statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return e; - } - - @Override - public StatementHandle createStatement(ConnectionHandle ch) { - StatementHandle h; - try { - h = super.createStatement(ch); - logger.debug("create statement {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - return h; - } - - @Override - public void closeStatement(StatementHandle h) { - try { - super.closeStatement(h); - logger.debug("statement closed {}", h); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - } - - - - - - - - @Override - public void rollback(ConnectionHandle ch) { - try { - super.rollback(ch); - logger.debug("connection rollback with id {}", ch.id); - } catch (Exception err ) { - logger.error(EELFLoggerDelegate.errorLogger, err.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw(err); - } - } - - private class ConnectionExpiryHandler - implements RemovalListener<String, Connection> { - - public void onRemoval(RemovalNotification<String, Connection> notification) { - String connectionId = notification.getKey(); - Connection doomed = notification.getValue(); - logger.debug("Expiring connection {} because {}", connectionId, notification.getCause()); - try { - if (doomed != null) { - doomed.close(); - } - } catch (Throwable t) { - logger.warn("Exception thrown while expiring connection {}", connectionId, t); - } - } - } -} - - diff --git a/src/main/java/org/onap/music/mdbc/MdbcStatement.java b/src/main/java/org/onap/music/mdbc/MdbcStatement.java deleted file mode 100644 index 93fe80a..0000000 --- a/src/main/java/org/onap/music/mdbc/MdbcStatement.java +++ /dev/null @@ -1,416 +0,0 @@ -package org.onap.music.mdbc; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.Statement; - -import org.onap.music.exceptions.QueryException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.logging.format.AppMessages; 
-import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; - -/** - * ProxyStatement is a proxy Statement that front ends Statements from the underlying JDBC driver. It passes all operations through, - * and invokes the MusicSqlManager when there is the possibility that database tables have been created or dropped. - * - * @author Robert Eby - */ -public class MdbcStatement implements Statement { - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MdbcStatement.class); - private static final String DATASTAX_PREFIX = "com.datastax.driver"; - - final Statement stmt; // the Statement that we are proxying - final MusicSqlManager mgr; - //\TODO We may need to all pass the connection object to support autocommit - - public MdbcStatement(Statement s, MusicSqlManager m) { - this.stmt = s; - this.mgr = m; - } - - public MdbcStatement(Statement stmt, String sql, MusicSqlManager mgr) { - //\TODO why there is a constructor with a sql parameter in a not PreparedStatement - this.stmt = stmt; - this.mgr = mgr; - } - - @Override - public <T> T unwrap(Class<T> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxystatement unwrap: " + iface.getName()); - return stmt.unwrap(iface); - } - - @Override - public boolean isWrapperFor(Class<?> iface) throws SQLException { - logger.error(EELFLoggerDelegate.errorLogger, "proxystatement isWrapperFor: " + iface.getName()); - return stmt.isWrapperFor(iface); - } - - @Override - public ResultSet executeQuery(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeQuery: "+sql); - ResultSet r = null; - try { - mgr.preStatementHook(sql); - r = stmt.executeQuery(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "executeQuery: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return r; - } - - @Override - public int executeUpdate(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "executeUpdate: exception "+nm+" "+e); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public void close() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"Statement close: "); - stmt.close(); - } - - @Override - public int getMaxFieldSize() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"getMaxFieldSize"); - return stmt.getMaxFieldSize(); - } - - @Override - public void setMaxFieldSize(int max) throws SQLException { - stmt.setMaxFieldSize(max); - } - - @Override - public int getMaxRows() throws SQLException { - return stmt.getMaxRows(); - } - - @Override - public void setMaxRows(int max) throws SQLException { - stmt.setMaxRows(max); - } - - @Override - public void setEscapeProcessing(boolean enable) throws SQLException { - stmt.setEscapeProcessing(enable); - } - - @Override - public int getQueryTimeout() throws SQLException { - return stmt.getQueryTimeout(); - } - - @Override - public void setQueryTimeout(int seconds) throws SQLException { - //\TODO: we also need to implement a higher level timeout in MDBC - 
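        // Note: as written, this timeout is simply delegated to the wrapped JDBC Statement, so it
        // bounds only the underlying SQL execution; the MUSIC/Cassandra work performed in
        // preStatementHook()/postStatementHook() is presumably not covered, which is what the
        // TODO above is pointing at.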
logger.debug(EELFLoggerDelegate.applicationLogger,"setQueryTimeout seconds "+ seconds); - stmt.setQueryTimeout(seconds); - } - - @Override - public void cancel() throws SQLException { - stmt.cancel(); - } - - @Override - public SQLWarning getWarnings() throws SQLException { - return stmt.getWarnings(); - } - - @Override - public void clearWarnings() throws SQLException { - stmt.clearWarnings(); - } - - @Override - public void setCursorName(String name) throws SQLException { - stmt.setCursorName(name); - } - - @Override - public boolean execute(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - //\TODO Add the result of the postStatementHook to b - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger, "execute: exception "+nm+" "+e); - // Note: this seems to be the only call Camunda uses, so it is the only one I am fixing for now. - boolean ignore = nm.startsWith(DATASTAX_PREFIX); -// ignore |= (nm.startsWith("org.h2.jdbc.JdbcSQLException") && e.getMessage().contains("already exists")); - if (ignore) { - logger.warn("execute: exception (IGNORED) "+nm); - } else { - logger.error(EELFLoggerDelegate.errorLogger, " Exception "+nm+" "+e); - throw e; - } - } - return b; - } - - @Override - public ResultSet getResultSet() throws SQLException { - return stmt.getResultSet(); - } - - @Override - public int getUpdateCount() throws SQLException { - return stmt.getUpdateCount(); - } - - @Override - public boolean getMoreResults() throws SQLException { - return stmt.getMoreResults(); - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - stmt.setFetchDirection(direction); - } - - @Override - public int getFetchDirection() throws SQLException { - return stmt.getFetchDirection(); - } - - @Override - public void setFetchSize(int rows) throws SQLException { - stmt.setFetchSize(rows); - } - - @Override - public int getFetchSize() throws SQLException { - return stmt.getFetchSize(); - } - - @Override - public int getResultSetConcurrency() throws SQLException { - return stmt.getResultSetConcurrency(); - } - - @Override - public int getResultSetType() throws SQLException { - return stmt.getResultSetType(); - } - - @Override - public void addBatch(String sql) throws SQLException { - stmt.addBatch(sql); - } - - @Override - public void clearBatch() throws SQLException { - stmt.clearBatch(); - } - - @Override - public int[] executeBatch() throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeBatch: "); - int[] n = null; - try { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeBatch() is not supported by MDBC; your results may be incorrect as a result."); - n = stmt.executeBatch(); - synchronizeTables(null); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeBatch: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public Connection getConnection() throws SQLException { - return stmt.getConnection(); - } - - @Override - public boolean getMoreResults(int current) throws SQLException { - return stmt.getMoreResults(current); - } - - @Override - public ResultSet getGeneratedKeys() throws SQLException { - return stmt.getGeneratedKeys(); - } - - @Override - public int executeUpdate(String 
sql, int autoGeneratedKeys) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, int[] columnIndexes) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, String[] columnNames) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger,"execute: "+sql); - //\TODO Idem to the other execute without columnNames - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.error(EELFLoggerDelegate.errorLogger,"execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public int getResultSetHoldability() throws SQLException { - return stmt.getResultSetHoldability(); - } - - @Override - public boolean isClosed() throws SQLException { - return stmt.isClosed(); - } - - @Override - public void setPoolable(boolean poolable) throws SQLException { - stmt.setPoolable(poolable); - } - - @Override - public 
boolean isPoolable() throws SQLException { - return stmt.isPoolable(); - } - - @Override - public void closeOnCompletion() throws SQLException { - stmt.closeOnCompletion(); - } - - @Override - public boolean isCloseOnCompletion() throws SQLException { - return stmt.isCloseOnCompletion(); - } - - protected void synchronizeTables(String sql) { - if (sql == null || sql.trim().toLowerCase().startsWith("create")) { - if (mgr != null) { - try { - mgr.synchronizeTables(); - } catch (QueryException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - } - } - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/MusicSqlManager.java b/src/main/java/org/onap/music/mdbc/MusicSqlManager.java deleted file mode 100755 index 741ee9e..0000000 --- a/src/main/java/org/onap/music/mdbc/MusicSqlManager.java +++ /dev/null @@ -1,308 +0,0 @@ -package org.onap.music.mdbc; - -import java.sql.Connection; -import java.util.*; - -import org.json.JSONObject; - -import org.onap.music.mdbc.mixins.DBInterface; -import org.onap.music.mdbc.mixins.MixinFactory; -import org.onap.music.mdbc.mixins.MusicInterface; -import org.onap.music.mdbc.mixins.Utils; -import org.onap.music.mdbc.tables.StagingTable; -import org.onap.music.mdbc.tables.TxCommitProgress; -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.exceptions.QueryException; -import org.onap.music.logging.*; -import org.onap.music.logging.format.AppMessages; -import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; - -/** -* <p> -* MUSIC SQL Manager - code that helps take data written to a SQL database and seamlessly integrates it -* with <a href="https://github.com/att/music">MUSIC</a> that maintains data in a No-SQL data-store -* (<a href="http://cassandra.apache.org/">Cassandra</a>) and protects access to it with a distributed -* locking service (based on <a href="https://zookeeper.apache.org/">Zookeeper</a>). -* </p> -* <p> -* This code will support transactions by taking note of the value of the autoCommit flag, and of calls -* to <code>commit()</code> and <code>rollback()</code>. These calls should be made by the user's JDBC -* client. -* </p> -* -* @author Bharath Balasubramanian, Robert Eby -*/ -public class MusicSqlManager { - - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MusicSqlManager.class); - - private final DBInterface dbi; - private final MusicInterface mi; - private final Set<String> table_set; - private final HashMap<Range,StagingTable> transactionDigest; - private boolean autocommit; // a copy of the autocommit flag from the JDBC Connection - - /** - * Build a MusicSqlManager for a DB connection. This construct may only be called by getMusicSqlManager(), - * which will ensure that only one MusicSqlManager is created per URL. - * This is the location where the appropriate mixins to use for the MusicSqlManager should be determined. - * They should be picked based upon the URL and the properties passed to this constructor. - * <p> - * At the present time, we only support the use of the H2Mixin (for access to a local H2 database), - * with the CassandraMixin (for direct access to a Cassandra noSQL DB as the persistence layer). 
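 * The DB mixin that is actually instantiated is chosen at runtime from the
 * {@code Configuration.KEY_DB_MIXIN_NAME} property, falling back to
 * {@code Configuration.DB_MIXIN_DEFAULT}, as can be seen in the constructor body below.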
- * </p> - * - * @param url the JDBC URL which was used to connection to the database - * @param conn the actual connection to the database - * @param info properties passed from the initial JDBC connect() call - * @throws MDBCServiceException - */ - public MusicSqlManager(String url, Connection conn, Properties info, MusicInterface mi) throws MDBCServiceException { - try { - info.putAll(Utils.getMdbcProperties()); - String mixinDb = info.getProperty(Configuration.KEY_DB_MIXIN_NAME, Configuration.DB_MIXIN_DEFAULT); - this.dbi = MixinFactory.createDBInterface(mixinDb, this, url, conn, info); - this.mi = mi; - this.table_set = Collections.synchronizedSet(new HashSet<String>()); - this.autocommit = true; - this.transactionDigest = new HashMap<Range,StagingTable>(); - - }catch(Exception e) { - throw new MDBCServiceException(e.getMessage()); - } - } - - public void setAutoCommit(boolean b,String txId, TxCommitProgress progressKeeper, DatabasePartition partition) throws MDBCServiceException { - if (b != autocommit) { - autocommit = b; - logger.debug(EELFLoggerDelegate.applicationLogger,"autocommit changed to "+b); - if (b) { - // My reading is that turning autoCOmmit ON should automatically commit any outstanding transaction - if(txId == null || txId.isEmpty()) { - logger.error(EELFLoggerDelegate.errorLogger, "Connection ID is null",AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - throw new MDBCServiceException("tx id is null"); - } - commit(txId,progressKeeper,partition); - } - } - } - - /** - * Close this MusicSqlManager. - */ - public void close() { - if (dbi != null) { - dbi.close(); - } - } - - /** - * Code to be run within the DB driver before a SQL statement is executed. This is where tables - * can be synchronized before a SELECT, for those databases that do not support SELECT triggers. - * @param sql the SQL statement that is about to be executed - */ - public void preStatementHook(final String sql) { - dbi.preStatementHook(sql); - } - /** - * Code to be run within the DB driver after a SQL statement has been executed. This is where remote - * statement actions can be copied back to Cassandra/MUSIC. - * @param sql the SQL statement that was executed - */ - public void postStatementHook(final String sql) { - dbi.postStatementHook(sql,transactionDigest); - } - /** - * Synchronize the list of tables in SQL with the list in MUSIC. This function should be called when the - * proxy first starts, and whenever there is the possibility that tables were created or dropped. It is synchronized - * in order to prevent multiple threads from running this code in parallel. 
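 * For every table found in SQL that is not yet tracked (and is not one of the DB mixin's
 * reserved tables), this method initializes the corresponding MUSIC table, creates the
 * dirty-row table, installs the SQL triggers, and then copies any existing data via
 * {@link #synchronizeTableData(String)}.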
- */ - public synchronized void synchronizeTables() throws QueryException { - Set<String> set1 = dbi.getSQLTableSet(); // set of tables in the database - logger.debug(EELFLoggerDelegate.applicationLogger, "synchronizing tables:" + set1); - for (String tableName : set1) { - // This map will be filled in if this table was previously discovered - if (!table_set.contains(tableName) && !dbi.getReservedTblNames().contains(tableName)) { - logger.info(EELFLoggerDelegate.applicationLogger, "New table discovered: "+tableName); - try { - TableInfo ti = dbi.getTableInfo(tableName); - mi.initializeMusicForTable(ti,tableName); - //\TODO Verify if table info can be modify in the previous step, if not this step can be deleted - ti = dbi.getTableInfo(tableName); - mi.createDirtyRowTable(ti,tableName); - dbi.createSQLTriggers(tableName); - table_set.add(tableName); - synchronizeTableData(tableName); - logger.debug(EELFLoggerDelegate.applicationLogger, "synchronized tables:" + - table_set.size() + "/" + set1.size() + "tables uploaded"); - } catch (Exception e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - //logger.error(EELFLoggerDelegate.errorLogger, "Exception synchronizeTables: "+e); - throw new QueryException(); - } - } - } - -// Set<String> set2 = getMusicTableSet(music_ns); - // not working - fix later -// for (String tbl : set2) { -// if (!set1.contains(tbl)) { -// logger.debug("Old table dropped: "+tbl); -// dropSQLTriggers(tbl, conn); -// // ZZTODO drop camunda table ? -// } -// } - } - - /** - * On startup, copy dirty data from Cassandra to H2. May not be needed. - * @param tableName - */ - public void synchronizeTableData(String tableName) { - // TODO - copy MUSIC -> H2 - dbi.synchronizeData(tableName); - } - /** - * This method is called whenever there is a SELECT on a local SQL table, and should be called by the underlying databases - * triggering mechanism. It first checks the local dirty bits table to see if there are any keys in Cassandra whose value - * has not yet been sent to SQL. If there are, the appropriate values are copied from Cassandra to the local database. - * Under normal execution, this function behaves as a NOP operation. - * @param tableName This is the table on which the SELECT is being performed - */ - public void readDirtyRowsAndUpdateDb(String tableName) { - mi.readDirtyRowsAndUpdateDb(dbi,tableName); - } - - - - - /** - * This method gets the primary key that the music interfaces uses by default. - * If the front end uses a primary key, this will not match what is used in the MUSIC interface - * @return - */ - public String getMusicDefaultPrimaryKeyName() { - return mi.getMusicDefaultPrimaryKeyName(); - } - - /** - * Asks music interface to provide the function to create a primary key - * e.g. uuid(), 1, "unique_aksd419fjc" - * @return - */ - public String generateUniqueKey() { - // - return mi.generateUniqueKey(); - } - - - /** - * Perform a commit, as requested by the JDBC driver. If any row updates have been delayed, - * they are performed now and copied into MUSIC. 
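 * Concretely, the accumulated transaction digest is handed to
 * {@code MusicInterface.commitLog()}, which appends the changes to the REDO log in MUSIC for
 * the partition passed in; until then the digest is only held in memory.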
- * @throws MDBCServiceException - */ - public synchronized void commit(String txId, TxCommitProgress progressKeeper, DatabasePartition partition) throws MDBCServiceException { - logger.debug(EELFLoggerDelegate.applicationLogger, " commit "); - // transaction was committed -- add all the updates into the REDO-Log in MUSIC - try { - mi.commitLog(dbi, partition, transactionDigest, txId, progressKeeper); - }catch(MDBCServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.QUERYERROR, ErrorTypes.QUERYERROR, ErrorSeverity.CRITICAL); - throw e; - } - } - - /** - * Perform a rollback, as requested by the JDBC driver. If any row updates have been delayed, - * they are discarded. - */ - public synchronized void rollback() { - // transaction was rolled back - discard the updates - logger.debug(EELFLoggerDelegate.applicationLogger, "Rollback");; - transactionDigest.clear(); - } - - /** - * Get all - * @param table - * @param dbRow - * @return - */ - public String getMusicKeyFromRowWithoutPrimaryIndexes(String table, JSONObject dbRow) { - TableInfo ti = dbi.getTableInfo(table); - return mi.getMusicKeyFromRowWithoutPrimaryIndexes(ti,table, dbRow); - } - - public String getMusicKeyFromRow(String table, JSONObject dbRow) { - TableInfo ti = dbi.getTableInfo(table); - return mi.getMusicKeyFromRow(ti,table, dbRow); - } - - /** - * Returns all keys that matches the current sql statement, and not in already updated keys. - * - * @param sql the query that we are getting keys for - * @deprecated - */ - public ArrayList<String> getMusicKeys(String sql) { - ArrayList<String> musicKeys = new ArrayList<String>(); - //\TODO See if this is required - /* - try { - net.sf.jsqlparser.statement.Statement stmt = CCJSqlParserUtil.parse(sql); - if (stmt instanceof Insert) { - Insert s = (Insert) stmt; - String tbl = s.getTable().getName(); - musicKeys.add(generatePrimaryKey()); - } else { - String tbl; - String where = ""; - if (stmt instanceof Update){ - Update u = (Update) stmt; - tbl = u.getTables().get(0).getName(); - where = u.getWhere().toString(); - } else if (stmt instanceof Delete) { - Delete d = (Delete) stmt; - tbl = d.getTable().getName(); - if (d.getWhere()!=null) { - where = d.getWhere().toString(); - } - } else { - System.err.println("Not recognized sql type"); - tbl = ""; - } - String dbiSelect = "SELECT * FROM " + tbl; - if (!where.equals("")) { - dbiSelect += "WHERE" + where; - } - ResultSet rs = dbi.executeSQLRead(dbiSelect); - musicKeys.addAll(getMusicKeysWhere(tbl, Utils.parseResults(dbi.getTableInfo(tbl), rs))); - rs.getStatement().close(); - } - } catch (JSQLParserException | SQLException e) { - - e.printStackTrace(); - } - System.err.print("MusicKeys:"); - for(String musicKey:musicKeys) { - System.out.print(musicKey + ","); - } - */ - return musicKeys; - } - - public void own(List<Range> ranges) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - public void appendRange(String rangeId, List<Range> ranges) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - public void relinquish(String ownerId, String rangeId) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - -} diff --git a/src/main/java/org/onap/music/mdbc/ProxyStatement.java b/src/main/java/org/onap/music/mdbc/ProxyStatement.java deleted file mode 100755 index e84dc7b..0000000 --- a/src/main/java/org/onap/music/mdbc/ProxyStatement.java +++ /dev/null @@ -1,1262 +0,0 @@ -package 
org.onap.music.mdbc; - -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.CallableStatement; -import java.sql.Clob; -import java.sql.Connection; -import java.sql.Date; -import java.sql.NClob; -import java.sql.ParameterMetaData; -import java.sql.PreparedStatement; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.Map; - -import org.apache.log4j.Logger; - -import org.onap.music.exceptions.QueryException; - -/** - * ProxyStatement is a proxy Statement that front ends Statements from the underlying JDBC driver. It passes all operations through, - * and invokes the MusicSqlManager when there is the possibility that database tables have been created or dropped. - * - * @author Robert Eby - */ -public class ProxyStatement implements CallableStatement { - private static final Logger logger = Logger.getLogger(ProxyStatement.class); - private static final String DATASTAX_PREFIX = "com.datastax.driver"; - - private final Statement stmt; // the Statement that we are proxying - private final MusicSqlManager mgr; - - public ProxyStatement(Statement s, MusicSqlManager m) { - this.stmt = s; - this.mgr = m; - } - - @Override - public <T> T unwrap(Class<T> iface) throws SQLException { - return stmt.unwrap(iface); - } - - @Override - public boolean isWrapperFor(Class<?> iface) throws SQLException { - return stmt.isWrapperFor(iface); - } - - @Override - public ResultSet executeQuery(String sql) throws SQLException { - logger.debug("executeQuery: "+sql); - ResultSet r = null; - try { - mgr.preStatementHook(sql); - r = stmt.executeQuery(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeQuery: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return r; - } - - @Override - public int executeUpdate(String sql) throws SQLException { - logger.debug("executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public void close() throws SQLException { - stmt.close(); - } - - @Override - public int getMaxFieldSize() throws SQLException { - return stmt.getMaxFieldSize(); - } - - @Override - public void setMaxFieldSize(int max) throws SQLException { - stmt.setMaxFieldSize(max); - } - - @Override - public int getMaxRows() throws SQLException { - return stmt.getMaxRows(); - } - - @Override - public void setMaxRows(int max) throws SQLException { - stmt.setMaxRows(max); - } - - @Override - public void setEscapeProcessing(boolean enable) throws SQLException { - stmt.setEscapeProcessing(enable); - } - - @Override - public int getQueryTimeout() throws SQLException { - return stmt.getQueryTimeout(); - } - - @Override - public void setQueryTimeout(int seconds) throws SQLException { - stmt.setQueryTimeout(seconds); - } - - @Override - public void cancel() throws SQLException { - stmt.cancel(); - } - - @Override - public SQLWarning 
getWarnings() throws SQLException { - return stmt.getWarnings(); - } - - @Override - public void clearWarnings() throws SQLException { - stmt.clearWarnings(); - } - - @Override - public void setCursorName(String name) throws SQLException { - stmt.setCursorName(name); - } - - @Override - public boolean execute(String sql) throws SQLException { - logger.debug("execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - // Note: this seems to be the only call Camunda uses, so it is the only one I am fixing for now. - boolean ignore = nm.startsWith(DATASTAX_PREFIX); -// ignore |= (nm.startsWith("org.h2.jdbc.JdbcSQLException") && e.getMessage().contains("already exists")); - if (ignore) { - logger.warn("execute: exception (IGNORED) "+nm); - } else { - logger.warn("execute: exception "+nm); - throw e; - } - } - return b; - } - - @Override - public ResultSet getResultSet() throws SQLException { - return stmt.getResultSet(); - } - - @Override - public int getUpdateCount() throws SQLException { - return stmt.getUpdateCount(); - } - - @Override - public boolean getMoreResults() throws SQLException { - return stmt.getMoreResults(); - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - stmt.setFetchDirection(direction); - } - - @Override - public int getFetchDirection() throws SQLException { - return stmt.getFetchDirection(); - } - - @Override - public void setFetchSize(int rows) throws SQLException { - stmt.setFetchSize(rows); - } - - @Override - public int getFetchSize() throws SQLException { - return stmt.getFetchSize(); - } - - @Override - public int getResultSetConcurrency() throws SQLException { - return stmt.getResultSetConcurrency(); - } - - @Override - public int getResultSetType() throws SQLException { - return stmt.getResultSetType(); - } - - @Override - public void addBatch(String sql) throws SQLException { - stmt.addBatch(sql); - } - - @Override - public void clearBatch() throws SQLException { - stmt.clearBatch(); - } - - @Override - public int[] executeBatch() throws SQLException { - logger.debug("executeBatch"); - int[] n = null; - try { - logger.warn("executeBatch() is not supported by MDBC; your results may be incorrect as a result."); - n = stmt.executeBatch(); - synchronizeTables(null); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeBatch: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public Connection getConnection() throws SQLException { - return stmt.getConnection(); - } - - @Override - public boolean getMoreResults(int current) throws SQLException { - return stmt.getMoreResults(current); - } - - @Override - public ResultSet getGeneratedKeys() throws SQLException { - return stmt.getGeneratedKeys(); - } - - @Override - public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug("executeUpdate: 
"+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public int executeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug("executeUpdate: "+sql); - int n = 0; - try { - mgr.preStatementHook(sql); - n = stmt.executeUpdate(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("executeUpdate: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return n; - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, autoGeneratedKeys); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, int[] columnIndexes) throws SQLException { - logger.debug("execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnIndexes); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public boolean execute(String sql, String[] columnNames) throws SQLException { - logger.debug("execute: "+sql); - boolean b = false; - try { - mgr.preStatementHook(sql); - b = stmt.execute(sql, columnNames); - mgr.postStatementHook(sql); - synchronizeTables(sql); - } catch (Exception e) { - String nm = e.getClass().getName(); - logger.warn("execute: exception "+nm); - if (!nm.startsWith(DATASTAX_PREFIX)) - throw e; - } - return b; - } - - @Override - public int getResultSetHoldability() throws SQLException { - return stmt.getResultSetHoldability(); - } - - @Override - public boolean isClosed() throws SQLException { - return stmt.isClosed(); - } - - @Override - public void setPoolable(boolean poolable) throws SQLException { - stmt.setPoolable(poolable); - } - - @Override - public boolean isPoolable() throws SQLException { - return stmt.isPoolable(); - } - - @Override - public void closeOnCompletion() throws SQLException { - stmt.closeOnCompletion(); - } - - @Override - public boolean isCloseOnCompletion() throws SQLException { - return stmt.isCloseOnCompletion(); - } - - @Override - public ResultSet executeQuery() throws SQLException { - logger.debug("executeQuery"); - return ((PreparedStatement)stmt).executeQuery(); - } - - @Override - public int executeUpdate() throws SQLException { - logger.debug("executeUpdate"); - return ((PreparedStatement)stmt).executeUpdate(); - } - - @Override - public void setNull(int parameterIndex, int sqlType) throws SQLException { - ((PreparedStatement)stmt).setNull(parameterIndex, sqlType); - } - - @Override - public void setBoolean(int parameterIndex, boolean x) throws SQLException { - ((PreparedStatement)stmt).setBoolean(parameterIndex, x); - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - 
((PreparedStatement)stmt).setByte(parameterIndex, x); - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - ((PreparedStatement)stmt).setShort(parameterIndex, x); - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - ((PreparedStatement)stmt).setInt(parameterIndex, x); - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - ((PreparedStatement)stmt).setLong(parameterIndex, x); - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - ((PreparedStatement)stmt).setFloat(parameterIndex, x); - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - ((PreparedStatement)stmt).setDouble(parameterIndex, x); - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - ((PreparedStatement)stmt).setBigDecimal(parameterIndex, x); - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - ((PreparedStatement)stmt).setString(parameterIndex, x); - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - ((PreparedStatement)stmt).setBytes(parameterIndex, x); - } - - @Override - public void setDate(int parameterIndex, Date x) throws SQLException { - ((PreparedStatement)stmt).setDate(parameterIndex, x); - } - - @Override - public void setTime(int parameterIndex, Time x) throws SQLException { - ((PreparedStatement)stmt).setTime(parameterIndex, x); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - ((PreparedStatement)stmt).setTimestamp(parameterIndex, x); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setAsciiStream(parameterIndex, x, length); - } - - @SuppressWarnings("deprecation") - @Override - public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setUnicodeStream(parameterIndex, x, length); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { - ((PreparedStatement)stmt).setBinaryStream(parameterIndex, x, length); - } - - @Override - public void clearParameters() throws SQLException { - ((PreparedStatement)stmt).clearParameters(); - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { - ((PreparedStatement)stmt).setObject(parameterIndex, x, targetSqlType); - } - - @Override - public void setObject(int parameterIndex, Object x) throws SQLException { - ((PreparedStatement)stmt).setObject(parameterIndex, x); - } - - @Override - public boolean execute() throws SQLException { - return ((PreparedStatement)stmt).execute(); - } - - @Override - public void addBatch() throws SQLException { - ((PreparedStatement)stmt).addBatch(); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { - ((PreparedStatement)stmt).setCharacterStream(parameterIndex, reader, length); - } - - @Override - public void setRef(int parameterIndex, Ref x) throws SQLException { - ((PreparedStatement)stmt).setRef(parameterIndex, x); - } - - @Override - public void setBlob(int parameterIndex, Blob x) throws SQLException { - ((PreparedStatement)stmt).setBlob(parameterIndex, x); - } - - @Override - public void setClob(int parameterIndex, Clob x) throws 
SQLException { - ((PreparedStatement)stmt).setClob(parameterIndex, x); - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - ((PreparedStatement)stmt).setArray(parameterIndex, x); - } - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - return ((PreparedStatement)stmt).getMetaData(); - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - ((PreparedStatement)stmt).setDate(parameterIndex, x, cal); - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - ((PreparedStatement)stmt).setTime(parameterIndex, x, cal); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterIndex, x, cal); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).setNull(parameterIndex, sqlType, typeName); - } - - @Override - public void setURL(int parameterIndex, URL x) throws SQLException { - ((CallableStatement)stmt).setURL(parameterIndex, x); - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return ((CallableStatement)stmt).getParameterMetaData(); - } - - @Override - public void setRowId(int parameterIndex, RowId x) throws SQLException { - ((CallableStatement)stmt).setRowId(parameterIndex, x); - } - - @Override - public void setNString(int parameterIndex, String value) throws SQLException { - ((CallableStatement)stmt).setNString(parameterIndex, value); - } - - @Override - public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value, length); - } - - @Override - public void setNClob(int parameterIndex, NClob value) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader, length); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream, length); - } - - @Override - public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader, length); - } - - @Override - public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - ((CallableStatement)stmt).setSQLXML(parameterIndex, xmlObject); - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - ((CallableStatement)stmt).setObject(parameterIndex, x, targetSqlType, scaleOrLength); - } - - @Override - public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x, length); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterIndex, x, length); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader, length); - } - - @Override - public 
void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterIndex, x); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterIndex, x); - } - - @Override - public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterIndex, reader); - } - - @Override - public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterIndex, value); - } - - @Override - public void setClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setClob(parameterIndex, reader); - } - - @Override - public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterIndex, inputStream); - } - - @Override - public void setNClob(int parameterIndex, Reader reader) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterIndex, reader); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType, int scale) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType, scale); - } - - @Override - public boolean wasNull() throws SQLException { - return ((CallableStatement)stmt).wasNull(); - } - - @Override - public String getString(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getString(parameterIndex); - } - - @Override - public boolean getBoolean(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBoolean(parameterIndex); - } - - @Override - public byte getByte(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getByte(parameterIndex); - } - - @Override - public short getShort(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getShort(parameterIndex); - } - - @Override - public int getInt(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getInt(parameterIndex); - } - - @Override - public long getLong(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getLong(parameterIndex); - } - - @Override - public float getFloat(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getFloat(parameterIndex); - } - - @Override - public double getDouble(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getDouble(parameterIndex); - } - - @SuppressWarnings("deprecation") - @Override - public BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterIndex, scale); - } - - @Override - public byte[] getBytes(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBytes(parameterIndex); - } - - @Override - public Date getDate(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterIndex); - } - - @Override - public Time getTime(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterIndex); - } - - @Override - public Timestamp getTimestamp(int parameterIndex) throws SQLException { - 
return ((CallableStatement)stmt).getTimestamp(parameterIndex); - } - - @Override - public Object getObject(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex); - } - - @Override - public BigDecimal getBigDecimal(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterIndex); - } - - @Override - public Object getObject(int parameterIndex, Map<String, Class<?>> map) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex, map); - } - - @Override - public Ref getRef(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getRef(parameterIndex); - } - - @Override - public Blob getBlob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getBlob(parameterIndex); - } - - @Override - public Clob getClob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getClob(parameterIndex); - } - - @Override - public Array getArray(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getArray(parameterIndex); - } - - @Override - public Date getDate(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterIndex, cal); - } - - @Override - public Time getTime(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterIndex, cal); - } - - @Override - public Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterIndex, cal); - } - - @Override - public void registerOutParameter(int parameterIndex, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterIndex, sqlType, typeName); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType, int scale) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType, scale); - } - - @Override - public void registerOutParameter(String parameterName, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).registerOutParameter(parameterName, sqlType, typeName); - } - - @Override - public URL getURL(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getURL(parameterIndex); - } - - @Override - public void setURL(String parameterName, URL val) throws SQLException { - ((CallableStatement)stmt).setURL(parameterName, val); - } - - @Override - public void setNull(String parameterName, int sqlType) throws SQLException { - ((CallableStatement)stmt).setNull(parameterName, sqlType); - } - - @Override - public void setBoolean(String parameterName, boolean x) throws SQLException { - ((CallableStatement)stmt).setBoolean(parameterName, x); - } - - @Override - public void setByte(String parameterName, byte x) throws SQLException { - ((CallableStatement)stmt).setByte(parameterName, x); - } - - @Override - public void setShort(String parameterName, short x) throws SQLException { - ((CallableStatement)stmt).setShort(parameterName, x); - } - - @Override - public void setInt(String parameterName, int x) throws SQLException { - ((CallableStatement)stmt).setInt(parameterName, x); - } - - @Override - public void setLong(String parameterName, long x) throws 
SQLException { - ((CallableStatement)stmt).setLong(parameterName, x); - } - - @Override - public void setFloat(String parameterName, float x) throws SQLException { - ((CallableStatement)stmt).setFloat(parameterName, x); - } - - @Override - public void setDouble(String parameterName, double x) throws SQLException { - ((CallableStatement)stmt).setDouble(parameterName, x); - } - - @Override - public void setBigDecimal(String parameterName, BigDecimal x) throws SQLException { - ((CallableStatement)stmt).setBigDecimal(parameterName, x); - } - - @Override - public void setString(String parameterName, String x) throws SQLException { - ((CallableStatement)stmt).setString(parameterName, x); - } - - @Override - public void setBytes(String parameterName, byte[] x) throws SQLException { - ((CallableStatement)stmt).setBytes(parameterName, x); - } - - @Override - public void setDate(String parameterName, Date x) throws SQLException { - ((CallableStatement)stmt).setDate(parameterName, x); - } - - @Override - public void setTime(String parameterName, Time x) throws SQLException { - ((CallableStatement)stmt).setTime(parameterName, x); - } - - @Override - public void setTimestamp(String parameterName, Timestamp x) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterName, x); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x, int length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x, length); - } - - @Override - public void setBinaryStream(String parameterName, InputStream x, int length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x, length); - } - - @Override - public void setObject(String parameterName, Object x, int targetSqlType, int scale) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x, targetSqlType, scale); - } - - @Override - public void setObject(String parameterName, Object x, int targetSqlType) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x, targetSqlType); - } - - @Override - public void setObject(String parameterName, Object x) throws SQLException { - ((CallableStatement)stmt).setObject(parameterName, x); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader, int length) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader, length); - } - - @Override - public void setDate(String parameterName, Date x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setDate(parameterName, x, cal); - } - - @Override - public void setTime(String parameterName, Time x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTime(parameterName, x, cal); - } - - @Override - public void setTimestamp(String parameterName, Timestamp x, Calendar cal) throws SQLException { - ((CallableStatement)stmt).setTimestamp(parameterName, x, cal); - } - - @Override - public void setNull(String parameterName, int sqlType, String typeName) throws SQLException { - ((CallableStatement)stmt).setNull(parameterName, sqlType, typeName); - } - - @Override - public String getString(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getString(parameterName); - } - - @Override - public boolean getBoolean(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBoolean(parameterName); - } - - @Override - public byte getByte(String parameterName) throws SQLException { - return 
((CallableStatement)stmt).getByte(parameterName); - } - - @Override - public short getShort(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getShort(parameterName); - } - - @Override - public int getInt(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getInt(parameterName); - } - - @Override - public long getLong(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getLong(parameterName); - } - - @Override - public float getFloat(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getFloat(parameterName); - } - - @Override - public double getDouble(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getDouble(parameterName); - } - - @Override - public byte[] getBytes(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBytes(parameterName); - } - - @Override - public Date getDate(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterName); - } - - @Override - public Time getTime(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterName); - } - - @Override - public Timestamp getTimestamp(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterName); - } - - @Override - public Object getObject(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName); - } - - @Override - public BigDecimal getBigDecimal(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBigDecimal(parameterName); - } - - @Override - public Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName, map); - } - - @Override - public Ref getRef(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getRef(parameterName); - } - - @Override - public Blob getBlob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getBlob(parameterName); - } - - @Override - public Clob getClob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getClob(parameterName); - } - - @Override - public Array getArray(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getArray(parameterName); - } - - @Override - public Date getDate(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getDate(parameterName, cal); - } - - @Override - public Time getTime(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTime(parameterName, cal); - } - - @Override - public Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException { - return ((CallableStatement)stmt).getTimestamp(parameterName, cal); - } - - @Override - public URL getURL(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getURL(parameterName); - } - - @Override - public RowId getRowId(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getRowId(parameterIndex); - } - - @Override - public RowId getRowId(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getRowId(parameterName); - } - - @Override - public void setRowId(String parameterName, RowId x) throws SQLException { - ((CallableStatement)stmt).setRowId(parameterName, x); - } - - @Override - public void 
setNString(String parameterName, String value) throws SQLException { - ((CallableStatement)stmt).setNString(parameterName, value); - } - - @Override - public void setNCharacterStream(String parameterName, Reader value, long length) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterName, value, length); - } - - @Override - public void setNClob(String parameterName, NClob value) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, value); - } - - @Override - public void setClob(String parameterName, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, reader, length); - } - - @Override - public void setBlob(String parameterName, InputStream inputStream, long length) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, inputStream, length); - } - - @Override - public void setNClob(String parameterName, Reader reader, long length) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, reader, length); - } - - @Override - public NClob getNClob(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNClob(parameterIndex); - } - - @Override - public NClob getNClob(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNClob(parameterName); - } - - @Override - public void setSQLXML(String parameterName, SQLXML xmlObject) throws SQLException { - ((CallableStatement)stmt).setSQLXML(parameterName, xmlObject); - } - - @Override - public SQLXML getSQLXML(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getSQLXML(parameterIndex); - } - - @Override - public SQLXML getSQLXML(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getSQLXML(parameterName); - } - - @Override - public String getNString(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNString(parameterIndex); - } - - @Override - public String getNString(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNString(parameterName); - } - - @Override - public Reader getNCharacterStream(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getNCharacterStream(parameterIndex); - } - - @Override - public Reader getNCharacterStream(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getNCharacterStream(parameterName); - } - - @Override - public Reader getCharacterStream(int parameterIndex) throws SQLException { - return ((CallableStatement)stmt).getCharacterStream(parameterIndex); - } - - @Override - public Reader getCharacterStream(String parameterName) throws SQLException { - return ((CallableStatement)stmt).getCharacterStream(parameterName); - } - - @Override - public void setBlob(String parameterName, Blob x) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, x); - } - - @Override - public void setClob(String parameterName, Clob x) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, x); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x, length); - } - - @Override - public void setBinaryStream(String parameterName, InputStream x, long length) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x, length); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader, long length) 
throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader, length); - } - - @Override - public void setAsciiStream(String parameterName, InputStream x) throws SQLException { - ((CallableStatement)stmt).setAsciiStream(parameterName, x); - } - - @Override - public void setBinaryStream(String parameterName, InputStream x) throws SQLException { - ((CallableStatement)stmt).setBinaryStream(parameterName, x); - } - - @Override - public void setCharacterStream(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setCharacterStream(parameterName, reader); - } - - @Override - public void setNCharacterStream(String parameterName, Reader value) throws SQLException { - ((CallableStatement)stmt).setNCharacterStream(parameterName, value); - } - - @Override - public void setClob(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setClob(parameterName, reader); - } - - @Override - public void setBlob(String parameterName, InputStream inputStream) throws SQLException { - ((CallableStatement)stmt).setBlob(parameterName, inputStream); - } - - @Override - public void setNClob(String parameterName, Reader reader) throws SQLException { - ((CallableStatement)stmt).setNClob(parameterName, reader); - } - - @Override - public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterIndex, type); - } - - @Override - public <T> T getObject(String parameterName, Class<T> type) throws SQLException { - return ((CallableStatement)stmt).getObject(parameterName, type); - } - - private void synchronizeTables(String sql) { - if (sql == null || sql.trim().toLowerCase().startsWith("create")) { - if (mgr != null) { - try { - mgr.synchronizeTables(); - } catch (QueryException e) { - - e.printStackTrace(); - } - } - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/Range.java b/src/main/java/org/onap/music/mdbc/Range.java deleted file mode 100644 index 8ed0150..0000000 --- a/src/main/java/org/onap/music/mdbc/Range.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.onap.music.mdbc; - -import java.io.Serializable; -import java.util.Objects; - - -/** - * This class represent a range of the whole database - * For now a range represents directly a table in Cassandra - * In the future we may decide to partition ranges differently - * @author Enrique Saurez - */ -public class Range implements Serializable { - - private static final long serialVersionUID = 1610744496930800088L; - - final public String table; - - public Range(String table) { - this.table = table; - } - - public String toString(){return table;} - - /** - * Compares to Range types - * @param o the other range against which this is compared - * @return the equality result - */ - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Range r = (Range) o; - return (table.equals(r.table)); - } - - @Override - public int hashCode(){ - return Objects.hash(table); - } - - public boolean overlaps(Range other) { - return table == other.table; - } -}
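Note that the removed Range class's overlaps method compares the two table strings with ==, which in Java checks reference identity, while its equals and hashCode compare the table name by value. A small illustrative variant (not part of this change) that keeps the overlap test consistent with equals:

    // Sketch only: value-based overlap test, consistent with equals()/hashCode().
    // At table granularity, two ranges overlap exactly when they name the same table.
    public boolean overlaps(Range other) {
        return other != null && table.equals(other.table);
    }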
\ No newline at end of file diff --git a/src/main/java/org/onap/music/mdbc/RedoRow.java b/src/main/java/org/onap/music/mdbc/RedoRow.java deleted file mode 100644 index c9c83eb..0000000 --- a/src/main/java/org/onap/music/mdbc/RedoRow.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.onap.music.mdbc; - -import java.util.UUID; - -public class RedoRow { - private String redoTableName; - private UUID redoRowIndex; - - public RedoRow(){} - - public RedoRow(String redoTableName, UUID redoRowIndex){ - this.redoRowIndex = redoRowIndex; - this.redoTableName = redoTableName; - } - - public String getRedoTableName() { - return redoTableName; - } - - public void setRedoTableName(String redoTableName) { - this.redoTableName = redoTableName; - } - - public UUID getRedoRowIndex() { - return redoRowIndex; - } - - public void setRedoRowIndex(UUID redoRowIndex) { - this.redoRowIndex = redoRowIndex; - } -} diff --git a/src/main/java/org/onap/music/mdbc/StateManager.java b/src/main/java/org/onap/music/mdbc/StateManager.java deleted file mode 100644 index 4c7b9aa..0000000 --- a/src/main/java/org/onap/music/mdbc/StateManager.java +++ /dev/null @@ -1,209 +0,0 @@ -package org.onap.music.mdbc; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.logging.format.AppMessages; -import org.onap.music.logging.format.ErrorSeverity; -import org.onap.music.logging.format.ErrorTypes; -import org.onap.music.mdbc.mixins.MixinFactory; -import org.onap.music.mdbc.mixins.MusicInterface; -import org.onap.music.mdbc.mixins.MusicMixin; -import org.onap.music.mdbc.tables.TxCommitProgress; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * \TODO Implement an interface for the server logic and a factory - * @author Enrique Saurez - */ -public class StateManager { - - //\TODO We need to fix the auto-commit mode and multiple transactions with the same connection - - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(StateManager.class); - - /** - * This is the interface used by all the MusicSqlManagers, - * that are created by the MDBC Server - * @see MusicInterface - */ - private MusicInterface musicManager; - /** - * This is the Running Queries information table. - * It mainly contains information about the entities - * that have being committed so far. - */ - private TxCommitProgress transactionInfo; - - private Map<String,MdbcConnection> mdbcConnections; - - private String sqlDatabase; - - private String url; - - private Properties info; - - @SuppressWarnings("unused") - private DatabasePartition ranges; - - public StateManager(String url, Properties info, DatabasePartition ranges, String sqlDatabase) throws MDBCServiceException { - this.sqlDatabase = sqlDatabase; - this.ranges = ranges; - this.url = url; - this.info = info; - this.transactionInfo = new TxCommitProgress(); - //\fixme this is not really used, delete! 
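For orientation, the StateManager being removed here is built from a JDBC URL, driver Properties, the DatabasePartition it owns, and the SQL database name. A minimal construction sketch, reusing the NodeConfiguration reader that appears later in this change; the class name, config path, and MariaDB URL below are placeholders, not values from this commit:

    import java.util.Properties;
    import org.onap.music.mdbc.StateManager;
    import org.onap.music.mdbc.configurations.NodeConfiguration;

    // Hypothetical bootstrap helper; the file path and JDBC URL are placeholders.
    public class StateManagerBootstrapSketch {
        public static StateManager bootstrap() throws Exception {
            NodeConfiguration cfg = NodeConfiguration.readJsonFromFile("config-0.json");
            Properties info = new Properties();            // MUSIC/mixin settings would go here
            String url = "jdbc:mariadb://localhost:3306";  // placeholder SQL endpoint
            return new StateManager(url, info, cfg.partition, cfg.sqlDatabaseName);
        }
    }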
- String cassandraUrl = info.getProperty(Configuration.KEY_CASSANDRA_URL, Configuration.CASSANDRA_URL_DEFAULT); - String mixin = info.getProperty(Configuration.KEY_MUSIC_MIXIN_NAME, Configuration.MUSIC_MIXIN_DEFAULT); - init(mixin, cassandraUrl); - } - - protected void init(String mixin, String cassandraUrl) throws MDBCServiceException { - this.musicManager = MixinFactory.createMusicInterface(mixin, cassandraUrl, info); - this.musicManager.createKeyspace(); - try { - this.musicManager.initializeMetricDataStructures(); - } catch (MDBCServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR); - throw(e); - } - MusicMixin.loadProperties(); - this.mdbcConnections = new HashMap<>(); - initSqlDatabase(); - } - - protected void initSqlDatabase() throws MDBCServiceException { - try { - //\TODO: pass the driver as a variable - Class.forName("org.mariadb.jdbc.Driver"); - } - catch (ClassNotFoundException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR); - return; - } - try { - Connection sqlConnection = DriverManager.getConnection(this.url, this.info); - StringBuilder sql = new StringBuilder("CREATE DATABASE IF NOT EXISTS ") - .append(sqlDatabase) - .append(";"); - Statement stmt = sqlConnection.createStatement(); - stmt.execute(sql.toString()); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR); - throw new MDBCServiceException(e.getMessage()); - } - } - - public void CloseConnection(String connectionId){ - //\TODO check if there is a race condition - if(mdbcConnections.containsKey(connectionId)) { - transactionInfo.deleteTxProgress(connectionId); - try { - Connection conn = mdbcConnections.get(connectionId); - if(conn!=null) - conn.close(); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR); - } - mdbcConnections.remove(connectionId); - } - } - - public void OpenConnection(String id, Properties information){ - if(!mdbcConnections.containsKey(id)){ - Connection sqlConnection; - MdbcConnection newConnection; - //Create connection to local SQL DB - //\TODO: create function to generate connection outside of open connection and get connection - try { - //\TODO: pass the driver as a variable - Class.forName("org.mariadb.jdbc.Driver"); - } - catch (ClassNotFoundException e) { - // TODO Auto-generated catch block - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR); - return; - } - try { - sqlConnection = DriverManager.getConnection(this.url+"/"+this.sqlDatabase, this.info); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.QUERYERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - sqlConnection = null; - } - //Create MDBC connection - try { - newConnection = new MdbcConnection(id, this.url+"/"+this.sqlDatabase, sqlConnection, info, this.musicManager, transactionInfo,ranges); - } catch (MDBCServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - newConnection = null; - return; - } - 
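The initSqlDatabase method above opens a JDBC Connection and Statement that are never closed. A variant sketch of the same CREATE DATABASE step (not the committed code), assuming the surrounding StateManager fields and using try-with-resources for cleanup:

    // Driver loading via Class.forName is unchanged and omitted here.
    protected void initSqlDatabase() throws MDBCServiceException {
        try (Connection sqlConnection = DriverManager.getConnection(this.url, this.info);
             Statement stmt = sqlConnection.createStatement()) {
            stmt.execute("CREATE DATABASE IF NOT EXISTS " + sqlDatabase + ";");
        } catch (SQLException e) {
            logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(), AppMessages.UNKNOWNERROR,
                    ErrorSeverity.CRITICAL, ErrorTypes.GENERALSERVICEERROR);
            throw new MDBCServiceException(e.getMessage());
        }
    }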
logger.info(EELFLoggerDelegate.applicationLogger,"Connection created for connection: "+id); - transactionInfo.createNewTransactionTracker(id, sqlConnection); - if(newConnection != null) { - mdbcConnections.put(id,newConnection); - } - } - } - - /** - * This function returns the connection to the corresponding transaction - * @param id of the transaction, created using - * @return - */ - public Connection GetConnection(String id) { - if(mdbcConnections.containsKey(id)) { - //\TODO: Verify if this make sense - // Intent: reinitialize transaction progress, when it already completed the previous tx for the same connection - if(transactionInfo.isComplete(id)) { - transactionInfo.reinitializeTxProgress(id); - } - return mdbcConnections.get(id); - } - - Connection sqlConnection; - MdbcConnection newConnection; - try { - //TODO: pass the driver as a variable - Class.forName("org.mariadb.jdbc.Driver"); - } - catch (ClassNotFoundException e) { - // TODO Auto-generated catch block - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.QUERYERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - } - - //Create connection to local SQL DB - try { - sqlConnection = DriverManager.getConnection(this.url+"/"+this.sqlDatabase, this.info); - } catch (SQLException e) { - logger.error("sql connection was not created correctly"); - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.QUERYERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - sqlConnection = null; - } - //Create MDBC connection - try { - newConnection = new MdbcConnection(id,this.url+"/"+this.sqlDatabase, sqlConnection, info, this.musicManager, transactionInfo,ranges); - } catch (MDBCServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage(),AppMessages.UNKNOWNERROR, ErrorSeverity.CRITICAL, ErrorTypes.QUERYERROR); - newConnection = null; - } - logger.info(EELFLoggerDelegate.applicationLogger,"Connection created for connection: "+id); - - transactionInfo.createNewTransactionTracker(id, sqlConnection); - if(newConnection != null) { - mdbcConnections.put(id,newConnection); - } - return newConnection; - } - - public void InitializeSystem() { - //\TODO Prefetch data to system using the data ranges as guide - throw new UnsupportedOperationException("Function initialize system needs to be implemented id MdbcStateManager"); - } -} diff --git a/src/main/java/org/onap/music/mdbc/TableInfo.java b/src/main/java/org/onap/music/mdbc/TableInfo.java deleted file mode 100755 index ee272d8..0000000 --- a/src/main/java/org/onap/music/mdbc/TableInfo.java +++ /dev/null @@ -1,75 +0,0 @@ -package org.onap.music.mdbc; - -import java.sql.Types; -import java.util.ArrayList; -import java.util.List; - -/** - * Information about a table in the local database. It consists of three ordered list, which should all have the - * same length. A list of column names, a list of DB column types, and a list of booleans specifying which columns are keys. - * @author Robert P. Eby - */ -public class TableInfo { - /** An ordered list of the column names in this table */ - public List<String> columns; - /** An ordered list of the column types in this table; the types are integers taken from {@link java.sql.Types}. */ - public List<Integer> coltype; - /** An ordered list of booleans indicating if a column is a primary key column or not. */ - public List<Boolean> iskey; - - /** Construct an (initially) empty TableInfo. 
*/ - public TableInfo() { - columns = new ArrayList<String>(); - coltype = new ArrayList<Integer>(); - iskey = new ArrayList<Boolean>(); - } - /** - * Check whether the column whose name is <i>name</i> is a primary key column. - * @param name the column name - * @return true if it is, false otherwise - */ - public boolean iskey(String name) { - for (int i = 0; i < columns.size(); i++) { - if (this.columns.get(i).equalsIgnoreCase(name)) - return this.iskey.get(i); - } - return false; - } - /** - * Get the type of the column whose name is <i>name</i>. - * @param name the column name - * @return the column type or Types.NULL - */ - public int getColType(String name) { - for (int i = 0; i < columns.size(); i++) { - if (this.columns.get(i).equalsIgnoreCase(name)) - return this.coltype.get(i); - } - return Types.NULL; - } - - /** - * Checks if this table has a primary key - * @return - */ - public boolean hasKey() { - for (Boolean b: iskey) { - if (b) { - return true; - } - } - return false; - } - - public List<String> getKeyColumns(){ - List<String> keys = new ArrayList<String>(); - int idx = 0; - for (Boolean b: iskey) { - if (b) { - keys.add(this.columns.get(idx)); - } - idx++; - } - return keys; - } -} diff --git a/src/main/java/org/onap/music/mdbc/configurations/NodeConfiguration.java b/src/main/java/org/onap/music/mdbc/configurations/NodeConfiguration.java deleted file mode 100644 index 156c901..0000000 --- a/src/main/java/org/onap/music/mdbc/configurations/NodeConfiguration.java +++ /dev/null @@ -1,71 +0,0 @@ -package org.onap.music.mdbc.configurations; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.DatabasePartition; -import org.onap.music.mdbc.MDBCUtils; -import org.onap.music.mdbc.Range; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.*; - -public class NodeConfiguration { - - private static transient final EELFLoggerDelegate LOG = EELFLoggerDelegate.getLogger(NodeConfiguration.class); - - public String sqlDatabaseName; - public DatabasePartition partition; - public String nodeName; - - public NodeConfiguration(String tables, UUID mriIndex, String mriTableName, String sqlDatabaseName, String node, String redoRecordsTable){ - // public DatabasePartition(List<Range> knownRanges, UUID mriIndex, String mriTable, String lockId, String musicTxDigestTable) { - partition = new DatabasePartition(toRanges(tables), mriIndex, mriTableName, null, redoRecordsTable) ; - this.sqlDatabaseName = sqlDatabaseName; - this.nodeName = node; - } - - protected List<Range> toRanges(String tables){ - List<Range> newRange = new ArrayList<>(); - String[] tablesArray=tables.split(","); - for(String table: tablesArray) { - newRange.add(new Range(table)); - } - return newRange; - } - - public String toJson() { - GsonBuilder builder = new GsonBuilder(); - builder.setPrettyPrinting().serializeNulls();; - Gson gson = builder.create(); - return gson.toJson(this); - } - - public void saveToFile(String file){ - try { - String serialized = this.toJson(); - MDBCUtils.saveToFile(serialized,file,LOG); - } catch (IOException e) { - e.printStackTrace(); - // Exit with error - System.exit(1); - } - } - - public static NodeConfiguration readJsonFromFile( String filepath) throws FileNotFoundException { - BufferedReader br; - try { - br = new BufferedReader( - new FileReader(filepath)); - } catch (FileNotFoundException e) { - 
LOG.error(EELFLoggerDelegate.errorLogger,"File was not found when reading json"+e); - throw e; - } - Gson gson = new Gson(); - NodeConfiguration config = gson.fromJson(br, NodeConfiguration.class); - return config; - } -} diff --git a/src/main/java/org/onap/music/mdbc/configurations/TablesConfiguration.java b/src/main/java/org/onap/music/mdbc/configurations/TablesConfiguration.java deleted file mode 100644 index c9f36e5..0000000 --- a/src/main/java/org/onap/music/mdbc/configurations/TablesConfiguration.java +++ /dev/null @@ -1,179 +0,0 @@ -package org.onap.music.mdbc.configurations; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.DatabaseOperations; -import org.onap.music.mdbc.Range; -import org.onap.music.mdbc.RedoRow; - -import com.google.gson.Gson; -import org.onap.music.datastore.PreparedQueryObject; -import org.onap.music.exceptions.MusicServiceException; -import org.onap.music.main.MusicCore; - -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; - -public class TablesConfiguration { - - private final String TIT_TABLE_NAME = "transactioninformation"; - private final String MUSIC_TX_DIGEST_TABLE_NAME = "musictxdigest"; - - private transient static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(TablesConfiguration.class); - private List<PartitionInformation> partitions; - private String internalNamespace; - private int internalReplicationFactor; - private String musicNamespace; - private String tableToPartitionName; - private String partitionInformationTableName; - private String redoHistoryTableName; - private String sqlDatabaseName; - - public TablesConfiguration(){} - - /** - * This functions initalize all the corresponding tables and rows - * @return a list of node configurations to be used when starting each of the servers - * @throws MDBCServiceException - * @apiNote This function assumes that when used, there is not associated redo history in the tables to the tables that are going to be managed by this configuration file - */ - public List<NodeConfiguration> initializeAndCreateNodeConfigurations() throws MDBCServiceException { - initInternalNamespace(); - DatabaseOperations.createNamespace(musicNamespace, internalReplicationFactor); - List<NodeConfiguration> nodeConfigs = new ArrayList<>(); - if(partitions == null){ - logger.error("Partitions was not correctly initialized"); - throw new MDBCServiceException("Partition was not correctly initialized"); - } - for(PartitionInformation partitionInfo : partitions){ - String mriTableName = partitionInfo.mriTableName; - mriTableName = (mriTableName==null || mriTableName.isEmpty())?TIT_TABLE_NAME:mriTableName; - //0) Create the corresponding Music Range Information table - DatabaseOperations.createMusicRangeInformationTable(musicNamespace,mriTableName); - String musicTxDigestTableName = partitionInfo.mtxdTableName; - musicTxDigestTableName = (musicTxDigestTableName==null || musicTxDigestTableName.isEmpty())? 
MUSIC_TX_DIGEST_TABLE_NAME :musicTxDigestTableName; - DatabaseOperations.createMusicTxDigest(musicNamespace,musicTxDigestTableName); - String partitionId; - if(partitionInfo.partitionId==null || partitionInfo.partitionId.isEmpty()){ - if(partitionInfo.replicationFactor==0){ - logger.error("Replication factor and partition id are both empty, and this is an invalid configuration" ); - throw new MDBCServiceException("Replication factor and partition id are both empty, and this is an invalid configuration"); - } - //1) Create a row in the partition info table - //partitionId = DatabaseOperations.createPartitionInfoRow(musicNamespace,pitName,partitionInfo.replicationFactor,partitionInfo.tables,null); - - } - else{ - partitionId = partitionInfo.partitionId; - } - //2) Create a row in the transaction information table - UUID mriTableIndex = DatabaseOperations.createEmptyMriRow(musicNamespace,mriTableName,"",null,partitionInfo.getTables()); - //3) Add owner and tit information to partition info table - RedoRow newRedoRow = new RedoRow(mriTableName,mriTableIndex); - //DatabaseOperations.updateRedoRow(musicNamespace,pitName,partitionId,newRedoRow,partitionInfo.owner,null); - //4) Update ttp with the new partition - //for(String table: partitionInfo.tables) { - //DatabaseOperations.updateTableToPartition(musicNamespace, ttpName, table, partitionId, null); - //} - //5) Add it to the redo history table - //DatabaseOperations.createRedoHistoryBeginRow(musicNamespace,rhName,newRedoRow,partitionId,null); - //6) Create config for this node - StringBuilder newStr = new StringBuilder(); - for(Range r: partitionInfo.tables){ - newStr.append(r.toString()).append(","); - } - nodeConfigs.add(new NodeConfiguration(newStr.toString(),mriTableIndex,mriTableName,sqlDatabaseName,partitionInfo.owner,musicTxDigestTableName)); - } - return nodeConfigs; - } - - private void initInternalNamespace() throws MDBCServiceException { - DatabaseOperations.createNamespace(internalNamespace,internalReplicationFactor); - StringBuilder createKeysTableCql = new StringBuilder("CREATE TABLE IF NOT EXISTS ") - .append(internalNamespace) - .append(".unsynced_keys (key text PRIMARY KEY);"); - PreparedQueryObject queryObject = new PreparedQueryObject(); - queryObject.appendQueryString(createKeysTableCql.toString()); - try { - MusicCore.createTable(internalNamespace,"unsynced_keys", queryObject,"critical"); - } catch (MusicServiceException e) { - logger.error("Error creating unsynced keys table" ); - throw new MDBCServiceException("Error creating unsynced keys table"); - } - } - - public static TablesConfiguration readJsonFromFile(String filepath) throws FileNotFoundException { - BufferedReader br; - try { - br = new BufferedReader( - new FileReader(filepath)); - } catch (FileNotFoundException e) { - logger.error(EELFLoggerDelegate.errorLogger,"File was not found when reading json"+e); - throw e; - } - Gson gson = new Gson(); - TablesConfiguration config = gson.fromJson(br, TablesConfiguration.class); - return config; - } - - public class PartitionInformation{ - private List<Range> tables; - private String owner; - private String mriTableName; - private String mtxdTableName; - private String partitionId; - private int replicationFactor; - - public List<Range> getTables() { - return tables; - } - - public void setTables(List<Range> tables) { - this.tables = tables; - } - - public String getOwner() { - return owner; - } - - public void setOwner(String owner) { - this.owner = owner; - } - - public String getMriTableName() { - return 
mriTableName; - } - - public void setMriTableName(String mriTableName) { - this.mriTableName = mriTableName; - } - - public String getPartitionId() { - return partitionId; - } - - public void setPartitionId(String partitionId) { - this.partitionId = partitionId; - } - - public int getReplicationFactor() { - return replicationFactor; - } - - public void setReplicationFactor(int replicationFactor) { - this.replicationFactor = replicationFactor; - } - - public String getMtxdTableName(){ - return mtxdTableName; - } - - public void setMtxdTableName(String mtxdTableName) { - this.mtxdTableName = mtxdTableName; - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/configurations/config-0.json b/src/main/java/org/onap/music/mdbc/configurations/config-0.json deleted file mode 100644 index 2207a52..0000000 --- a/src/main/java/org/onap/music/mdbc/configurations/config-0.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "sqlDatabaseName": "test", - "partition": { - "musicRangeInformationTable": "transactioninformation", - "musicRangeInformationIndex": "259a7a7c-f741-44ae-8d6e-227a02ddc96e", - "musicTxDigestTable": "musictxdigest", - "partitionId": "ad766447-1adf-4800-aade-9f31a356ab4b", - "lockId": "", - "ranges": [ - { - "table": "table11" - } - ] - }, - "nodeName": "" -} diff --git a/src/main/java/org/onap/music/mdbc/configurations/ranges.json b/src/main/java/org/onap/music/mdbc/configurations/ranges.json deleted file mode 100644 index 2a792e8..0000000 --- a/src/main/java/org/onap/music/mdbc/configurations/ranges.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "musicRangeInformationTable": "transactioninformation", - "musicRangeInformationIndex": "d0e8ef2e-aeca-4261-8d9d-1679f560b85b", - "partitionId": "798110cf-9c61-4db2-9446-cb2dbab5a143", - "lockId": "", - "ranges": [ - { - "table": "table1" - }, - { - "table": "table2" - } - ] -} diff --git a/src/main/java/org/onap/music/mdbc/configurations/tableConfiguration.json b/src/main/java/org/onap/music/mdbc/configurations/tableConfiguration.json deleted file mode 100644 index 383593a..0000000 --- a/src/main/java/org/onap/music/mdbc/configurations/tableConfiguration.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "partitions": [ - { - "tables": [ - { - "table": "table11" - } - ], - "owner": "", - "mriTableName": "musicrangeinformation", - "mtxdTableName": "musictxdigest", - "partitionId": "", - "replicationFactor": 1 - } - ], - "musicNamespace": "namespace", - "tableToPartitionName": "tabletopartition", - "partitionInformationTableName": "partitioninfo", - "redoHistoryTableName": "redohistory", - "sqlDatabaseName": "test", - "internalNamespace": "music_internal", - "internalReplicationFactor": 1 -} diff --git a/src/main/java/org/onap/music/mdbc/examples/EtdbTestClient.java b/src/main/java/org/onap/music/mdbc/examples/EtdbTestClient.java deleted file mode 100644 index 2a25667..0000000 --- a/src/main/java/org/onap/music/mdbc/examples/EtdbTestClient.java +++ /dev/null @@ -1,125 +0,0 @@ -package org.onap.music.mdbc.examples; - -import java.sql.*; -import org.apache.calcite.avatica.remote.Driver; - -public class EtdbTestClient { - - public static class Hr { - public final Employee[] emps = { - new Employee(100, "Bill"), - new Employee(200, "Eric"), - new Employee(150, "Sebastian"), - }; - } - - public static class Employee { - public final int empid; - public final String name; - - public Employee(int empid, String name) { - this.empid = empid; - this.name = name; - } - } - - public static void main(String[] args){ - try { - 
Class.forName("org.apache.calcite.avatica.remote.Driver"); - } catch (ClassNotFoundException e) { - e.printStackTrace(); - System.exit(1); - } - Connection connection; - try { - connection = DriverManager.getConnection("jdbc:avatica:remote:url=http://localhost:30000;serialization=protobuf"); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - try { - connection.setAutoCommit(false); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - - final String sql = "CREATE TABLE IF NOT EXISTS Persons (\n" + - " PersonID int,\n" + - " LastName varchar(255),\n" + - " FirstName varchar(255),\n" + - " Address varchar(255),\n" + - " City varchar(255)\n" + - ");"; - Statement stmt; - try { - stmt = connection.createStatement(); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - boolean execute; - try { - execute = stmt.execute(sql); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - if (execute) { - try { - connection.commit(); - } catch (SQLException e) { - e.printStackTrace(); - } - } - - try { - stmt.close(); - } catch (SQLException e) { - e.printStackTrace(); - } - - final String insertSQL = "INSERT INTO Persons VALUES (1, 'Martinez', 'Juan', 'KACB', 'ATLANTA');"; - Statement insertStmt; - try { - insertStmt = connection.createStatement(); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - try { - execute = insertStmt.execute(insertSQL); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - try { - connection.commit(); - } catch (SQLException e) { - e.printStackTrace(); - return; - } - - try { - stmt.close(); - insertStmt.close(); - } catch (SQLException e) { - e.printStackTrace(); - } - - try { - connection.commit(); - connection.close(); - } catch (SQLException e) { - e.printStackTrace(); - } - - - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/Cassandra2Mixin.java b/src/main/java/org/onap/music/mdbc/mixins/Cassandra2Mixin.java deleted file mode 100755 index 1efb795..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/Cassandra2Mixin.java +++ /dev/null @@ -1,287 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.sql.Types; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import org.json.JSONObject; -import org.json.JSONTokener; -import org.onap.music.datastore.PreparedQueryObject; -import org.onap.music.exceptions.MusicServiceException; -import org.onap.music.main.MusicCore; -import org.onap.music.main.ReturnType; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.DatabasePartition; -import org.onap.music.mdbc.TableInfo; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; - -/** - * This class provides the methods that MDBC needs to access Cassandra directly in order to provide persistence - * to calls to the user's DB. It stores dirty row references in one table (called DIRTY____) rather than one dirty - * table per real table (as {@link org.onap.music.mdbc.mixins.CassandraMixin} does). - * - * @author Robert P. 
Eby - */ -public class Cassandra2Mixin extends CassandraMixin { - private static final String DIRTY_TABLE = "DIRTY____"; // it seems Cassandra won't allow __DIRTY__ - private boolean dirty_table_created = false; - - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(Cassandra2Mixin.class); - - public Cassandra2Mixin() { - super(); - } - - public Cassandra2Mixin(String url, Properties info) throws MusicServiceException { - super(url, info); - } - - /** - * Get the name of this MusicInterface mixin object. - * @return the name - */ - @Override - public String getMixinName() { - return "cassandra2"; - } - /** - * Do what is needed to close down the MUSIC connection. - */ - @Override - public void close() { - super.close(); - } - - /** - * This method creates a keyspace in Music/Cassandra to store the data corresponding to the SQL tables. - * The keyspace name comes from the initialization properties passed to the JDBC driver. - */ - @Override - public void createKeyspace() { - super.createKeyspace(); - } - - /** - * This method performs all necessary initialization in Music/Cassandra to store the table <i>tableName</i>. - * @param tableName the table to initialize MUSIC for - */ - @Override - public void initializeMusicForTable(TableInfo ti, String tableName) { - super.initializeMusicForTable(ti, tableName); - } - - /** - * Create a <i>dirty row</i> table for the real table <i>tableName</i>. The primary keys columns from the real table are recreated in - * the dirty table, along with a "REPLICA__" column that names the replica that should update it's internal state from MUSIC. - * @param tableName the table to create a "dirty" table for - */ - @Override - public void createDirtyRowTable(TableInfo ti, String tableName) { - if (!dirty_table_created) { - String cql = String.format("CREATE TABLE IF NOT EXISTS %s.%s (tablename TEXT, replica TEXT, keyset TEXT, PRIMARY KEY(tablename, replica, keyset));", music_ns, DIRTY_TABLE); - executeMusicWriteQuery(cql); - dirty_table_created = true; - } - } - /** - * Drop the dirty row table for <i>tableName</i> from MUSIC. - * @param tableName the table being dropped - */ - @Override - public void dropDirtyRowTable(String tableName) { - // no-op - } - - private String buildJSON(TableInfo ti, String tableName, Object[] keys) { - // Build JSON string representing this keyset - JSONObject jo = new JSONObject(); - int j = 0; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - jo.put(ti.columns.get(i), keys[j++]); - } - } - return jo.toString(); - } - /** - * Remove the entries from the dirty row (for this replica) that correspond to a set of primary keys - * @param tableName the table we are removing dirty entries from - * @param keys the primary key values to use in the DELETE. Note: this is *only* the primary keys, not a full table row. - */ - @Override - public void cleanDirtyRow(TableInfo ti, String tableName, JSONObject keys) { - String cql = String.format("DELETE FROM %s.%s WHERE tablename = ? AND replica = ? 
AND keyset = ?;", music_ns, DIRTY_TABLE); - //Session sess = getMusicSession(); - //PreparedStatement ps = getPreparedStatementFromCache(cql); - Object[] values = new Object[] { tableName, myId, keys }; - logger.debug(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql + " with values " + values[0] + " " + values[1] + " " + values[2]); - - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(tableName); - pQueryObject.addValue(myId); - pQueryObject.addValue(keys); - ReturnType rt = MusicCore.eventualPut(pQueryObject); - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - logger.error(EELFLoggerDelegate.errorLogger, "Failure while eventualPut...: "+rt.getMessage()); - } - /*BoundStatement bound = ps.bind(values); - bound.setReadTimeoutMillis(60000); - synchronized (sess) { - sess.execute(bound); - }*/ - } - /** - * Get a list of "dirty rows" for a table. The dirty rows returned apply only to this replica, - * and consist of a Map of primary key column names and values. - * @param tableName the table we are querying for - * @return a list of maps; each list item is a map of the primary key names and values for that "dirty row". - */ - @SuppressWarnings("deprecation") - @Override - public List<Map<String,Object>> getDirtyRows(TableInfo ti, String tableName) { - String cql = String.format("SELECT keyset FROM %s.%s WHERE tablename = ? AND replica = ?;", music_ns, DIRTY_TABLE); - logger.debug(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql + " with values " + tableName + " " + myId); - - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(tableName); - pQueryObject.addValue(myId); - ResultSet results = null; - try { - results = MusicCore.get(pQueryObject); - } catch (MusicServiceException e) { - e.printStackTrace(); - } - /*Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(new Object[] { tableName, myId }); - bound.setReadTimeoutMillis(60000); - ResultSet results = null; - synchronized (sess) { - results = sess.execute(bound); - }*/ - List<Map<String,Object>> list = new ArrayList<Map<String,Object>>(); - for (Row row : results) { - String json = row.getString("keyset"); - JSONObject jo = new JSONObject(new JSONTokener(json)); - Map<String,Object> objs = new HashMap<String,Object>(); - for (String colname : jo.keySet()) { - int coltype = ti.getColType(colname); - switch (coltype) { - case Types.BIGINT: - objs.put(colname, jo.getLong(colname)); - break; - case Types.BOOLEAN: - objs.put(colname, jo.getBoolean(colname)); - break; - case Types.BLOB: - logger.error(EELFLoggerDelegate.errorLogger,"WE DO NOT SUPPORT BLOBS AS PRIMARY KEYS!! COLUMN NAME="+colname); - // throw an exception here??? - break; - case Types.DOUBLE: - objs.put(colname, jo.getDouble(colname)); - break; - case Types.INTEGER: - objs.put(colname, jo.getInt(colname)); - break; - case Types.TIMESTAMP: - objs.put(colname, new Date(jo.getString(colname))); - break; - case Types.VARCHAR: - default: - objs.put(colname, jo.getString(colname)); - break; - } - } - list.add(objs); - } - return list; - } - - /** - * Drops the named table and its dirty row table (for all replicas) from MUSIC. The dirty row table is dropped first. 
- * @param tableName This is the table that has been dropped - */ - @Override - public void clearMusicForTable(String tableName) { - super.clearMusicForTable(tableName); - } - /** - * This function is called whenever there is a DELETE to a row on a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. MUSIC propagates - * it to the other replicas. - * - * @param tableName This is the table that has changed. - * @param oldRow This is a copy of the old row being deleted - */ - public void deleteFromEntityTableInMusic(TableInfo ti, String tableName, JSONObject oldRow) { - super.deleteFromEntityTableInMusic(ti, tableName, oldRow); - } - /** - * This method is called whenever there is a SELECT on a local SQL table, wherein it first checks the local - * dirty bits table to see if there are any keys in Cassandra whose value has not yet been sent to SQL - * @param tableName This is the table on which the select is being performed - */ - @Override - public void readDirtyRowsAndUpdateDb(DBInterface dbi, String tableName) { - super.readDirtyRowsAndUpdateDb(dbi, tableName); - } - - /** - * This method is called whenever there is an INSERT or UPDATE to a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. Music propagates - * it to the other replicas. - * - * @param tableName This is the table that has changed. - * @param changedRow This is information about the row that has changed - */ - @Override - public void updateDirtyRowAndEntityTableInMusic(TableInfo ti, String tableName, JSONObject changedRow) { - super.updateDirtyRowAndEntityTableInMusic(ti, tableName, changedRow); - } - - /** - * Mark rows as "dirty" in the dirty rows table for <i>tableName</i>. Rows are marked for all replicas but - * this one (this replica already has the up to date data). - * @param tableName the table we are marking dirty - * @param keys an ordered list of the values being put into the table. The values that correspond to the tables' - * primary key are copied into the dirty row table. 
- */ - @Deprecated - public void markDirtyRow(TableInfo ti, String tableName, Object[] keys) { - String cql = String.format("INSERT INTO %s.%s (tablename, replica, keyset) VALUES (?, ?, ?);", music_ns, DIRTY_TABLE); - /*Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql);*/ - @SuppressWarnings("unused") - Object[] values = new Object[] { tableName, "", buildJSON(ti, tableName, keys) }; - PreparedQueryObject pQueryObject = null; - for (String repl : allReplicaIds) { - /*if (!repl.equals(myId)) { - values[1] = repl; - logger.info(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql + " with values " + values[0] + " " + values[1] + " " + values[2]); - - BoundStatement bound = ps.bind(values); - bound.setReadTimeoutMillis(60000); - synchronized (sess) { - sess.execute(bound); - } - }*/ - pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(tableName); - pQueryObject.addValue(repl); - pQueryObject.addValue(buildJSON(ti, tableName, keys)); - ReturnType rt = MusicCore.eventualPut(pQueryObject); - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - System.out.println("Failure while critical put..."+rt.getMessage()); - } - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/CassandraMixin.java b/src/main/java/org/onap/music/mdbc/mixins/CassandraMixin.java deleted file mode 100755 index 75eca0e..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/CassandraMixin.java +++ /dev/null @@ -1,1231 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.io.IOException; -import java.io.Reader; -import java.nio.ByteBuffer; -import java.sql.Types; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.TreeSet; -import java.util.UUID; - -import org.onap.music.mdbc.*; -import org.onap.music.mdbc.DatabaseOperations; -import org.onap.music.mdbc.tables.PartitionInformation; -import org.onap.music.mdbc.tables.MusicTxDigestId; -import org.onap.music.mdbc.tables.StagingTable; -import org.onap.music.mdbc.tables.MriReference; -import org.onap.music.mdbc.tables.MusicRangeInformationRow; -import org.onap.music.mdbc.tables.TxCommitProgress; - -import org.json.JSONObject; -import org.onap.music.datastore.CassaLockStore; -import org.onap.music.datastore.PreparedQueryObject; -import org.onap.music.exceptions.MusicLockingException; -import org.onap.music.exceptions.MusicQueryException; -import org.onap.music.exceptions.MusicServiceException; -import org.onap.music.main.MusicCore; -import org.onap.music.main.ResultType; -import org.onap.music.main.ReturnType; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.logging.EELFLoggerDelegate; -import com.datastax.driver.core.BoundStatement; -import com.datastax.driver.core.ColumnDefinitions; -import com.datastax.driver.core.DataType; -import com.datastax.driver.core.PreparedStatement; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; - -/** - * This class provides the methods that MDBC needs to access Cassandra directly in order to provide persistence - * to calls to the user's DB. It does not do any table or row locking. 
- * - * <p>This code only supports the following limited list of H2 and Cassandra data types:</p> - * <table summary=""> - * <tr><th>H2 Data Type</th><th>Mapped to Cassandra Data Type</th></tr> - * <tr><td>BIGINT</td><td>BIGINT</td></tr> - * <tr><td>BOOLEAN</td><td>BOOLEAN</td></tr> - * <tr><td>CLOB</td><td>BLOB</td></tr> - * <tr><td>DOUBLE</td><td>DOUBLE</td></tr> - * <tr><td>INTEGER</td><td>INT</td></tr> - * <tr><td>TIMESTAMP</td><td>TIMESTAMP</td></tr> - * <tr><td>VARBINARY</td><td>BLOB</td></tr> - * <tr><td>VARCHAR</td><td>VARCHAR</td></tr> - * </table> - * - * @author Robert P. Eby - */ -public class CassandraMixin implements MusicInterface { - /** The property name to use to identify this replica to MusicSqlManager */ - public static final String KEY_MY_ID = "myid"; - /** The property name to use for the comma-separated list of replica IDs. */ - public static final String KEY_REPLICAS = "replica_ids"; - /** The property name to use to identify the IP address for Cassandra. */ - public static final String KEY_MUSIC_ADDRESS = "music_address"; - /** The property name to use to provide the replication factor for Cassandra. */ - public static final String KEY_MUSIC_RFACTOR = "music_rfactor"; - /** The property name to use to provide the replication factor for Cassandra. */ - public static final String KEY_MUSIC_NAMESPACE = "music_namespace"; - /** The default property value to use for the Cassandra keyspace. */ - public static final String DEFAULT_MUSIC_KEYSPACE = "mdbc"; - /** The default property value to use for the Cassandra IP address. */ - public static final String DEFAULT_MUSIC_ADDRESS = "localhost"; - /** The default property value to use for the Cassandra replication factor. */ - public static final int DEFAULT_MUSIC_RFACTOR = 1; - /** The default primary string column, if none is provided. */ - public static final String MDBC_PRIMARYKEY_NAME = "mdbc_cuid"; - /** Type of the primary key, if none is defined by the user */ - public static final String MDBC_PRIMARYKEY_TYPE = "uuid"; - /** Namespace for the tables in MUSIC (Cassandra) */ - public static final String DEFAULT_MUSIC_NAMESPACE = "namespace"; - - //\TODO Add logic to change the names when required and create the tables when necessary - private String musicTxDigestTableName = "musictxdigest"; - private String musicRangeInformationTableName = "musicrangeinformation"; - - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(CassandraMixin.class); - - private static final Map<Integer, String> typemap = new HashMap<>(); - static { - // We only support the following type mappings currently (from DB -> Cassandra). - // Anything else will likely cause a NullPointerException - typemap.put(Types.BIGINT, "BIGINT"); // aka. 
IDENTITY - typemap.put(Types.BLOB, "VARCHAR"); - typemap.put(Types.BOOLEAN, "BOOLEAN"); - typemap.put(Types.CLOB, "BLOB"); - typemap.put(Types.DATE, "VARCHAR"); - typemap.put(Types.DOUBLE, "DOUBLE"); - typemap.put(Types.DECIMAL, "DECIMAL"); - typemap.put(Types.INTEGER, "INT"); - //typemap.put(Types.TIMESTAMP, "TIMESTAMP"); - typemap.put(Types.SMALLINT, "SMALLINT"); - typemap.put(Types.TIMESTAMP, "VARCHAR"); - typemap.put(Types.VARBINARY, "BLOB"); - typemap.put(Types.VARCHAR, "VARCHAR"); - typemap.put(Types.CHAR, "VARCHAR"); - //The "Hacks", these don't have a direct mapping - //typemap.put(Types.DATE, "VARCHAR"); - //typemap.put(Types.DATE, "TIMESTAMP"); - } - - protected final String music_ns; - protected final String myId; - protected final String[] allReplicaIds; - private final String musicAddress; - private final int music_rfactor; - private MusicConnector mCon = null; - private Session musicSession = null; - private boolean keyspace_created = false; - private Map<String, PreparedStatement> ps_cache = new HashMap<>(); - private Set<String> in_progress = Collections.synchronizedSet(new HashSet<String>()); - - public CassandraMixin() { - //this.logger = null; - this.musicAddress = null; - this.music_ns = null; - this.music_rfactor = 0; - this.myId = null; - this.allReplicaIds = null; - } - - public CassandraMixin(String url, Properties info) throws MusicServiceException { - // Default values -- should be overridden in the Properties - // Default to using the host_ids of the various peers as the replica IDs (this is probably preferred) - this.musicAddress = info.getProperty(KEY_MUSIC_ADDRESS, DEFAULT_MUSIC_ADDRESS); - logger.info(EELFLoggerDelegate.applicationLogger,"MusicSqlManager: musicAddress="+musicAddress); - - String s = info.getProperty(KEY_MUSIC_RFACTOR); - this.music_rfactor = (s == null) ? DEFAULT_MUSIC_RFACTOR : Integer.parseInt(s); - - this.myId = info.getProperty(KEY_MY_ID, getMyHostId()); - logger.info(EELFLoggerDelegate.applicationLogger,"MusicSqlManager: myId="+myId); - - - this.allReplicaIds = info.getProperty(KEY_REPLICAS, getAllHostIds()).split(","); - logger.info(EELFLoggerDelegate.applicationLogger,"MusicSqlManager: allReplicaIds="+info.getProperty(KEY_REPLICAS, this.myId)); - - this.music_ns = info.getProperty(KEY_MUSIC_NAMESPACE,DEFAULT_MUSIC_NAMESPACE); - logger.info(EELFLoggerDelegate.applicationLogger,"MusicSqlManager: music_ns="+music_ns); - musicRangeInformationTableName = "musicrangeinformation"; - createMusicKeyspace(); - } - - private void createMusicKeyspace() throws MusicServiceException { - - Map<String,Object> replicationInfo = new HashMap<>(); - replicationInfo.put("'class'", "'SimpleStrategy'"); - replicationInfo.put("'replication_factor'", music_rfactor); - - PreparedQueryObject queryObject = new PreparedQueryObject(); - queryObject.appendQueryString( - "CREATE KEYSPACE " + this.music_ns + " WITH REPLICATION = " + replicationInfo.toString().replaceAll("=", ":")); - - try { - MusicCore.nonKeyRelatedPut(queryObject, "eventual"); - } catch (MusicServiceException e) { - if (e.getMessage().equals("Keyspace "+this.music_ns+" already exists")) { - // ignore - } else { - throw(e); - } - } - } - - private String getMyHostId() { - ResultSet rs = executeMusicRead("SELECT HOST_ID FROM SYSTEM.LOCAL"); - Row row = rs.one(); - return (row == null) ? 
"UNKNOWN" : row.getUUID("HOST_ID").toString(); - } - private String getAllHostIds() { - ResultSet results = executeMusicRead("SELECT HOST_ID FROM SYSTEM.PEERS"); - StringBuilder sb = new StringBuilder(myId); - for (Row row : results) { - sb.append(","); - sb.append(row.getUUID("HOST_ID").toString()); - } - return sb.toString(); - } - - /** - * Get the name of this MusicInterface mixin object. - * @return the name - */ - @Override - public String getMixinName() { - return "cassandra"; - } - /** - * Do what is needed to close down the MUSIC connection. - */ - @Override - public void close() { - if (musicSession != null) { - musicSession.close(); - musicSession = null; - } - } - @Override - public void initializeMetricDataStructures() throws MDBCServiceException { - try { - DatabaseOperations.createMusicTxDigest(music_ns, musicTxDigestTableName);//\TODO If we start partitioning the data base, we would need to use the redotable number - DatabaseOperations.createMusicRangeInformationTable(music_ns, musicRangeInformationTableName); - } - catch(MDBCServiceException e){ - logger.error(EELFLoggerDelegate.errorLogger,"Error creating tables in MUSIC"); - } - } - - /** - * This method creates a keyspace in Music/Cassandra to store the data corresponding to the SQL tables. - * The keyspace name comes from the initialization properties passed to the JDBC driver. - */ - @Override - public void createKeyspace() { - if (keyspace_created == false) { - String cql = String.format("CREATE KEYSPACE IF NOT EXISTS %s WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : %d };", music_ns, music_rfactor); - executeMusicWriteQuery(cql); - keyspace_created = true; - } - } - - /** - * This method performs all necessary initialization in Music/Cassandra to store the table <i>tableName</i>. - * @param tableName the table to initialize MUSIC for - */ - @Override - public void initializeMusicForTable(TableInfo ti, String tableName) { - /** - * This code creates two tables for every table in SQL: - * (i) a table with the exact same name as the SQL table storing the SQL data. - * (ii) a "dirty bits" table that stores the keys in the Cassandra table that are yet to be - * updated in the SQL table (they were written by some other node). - */ - StringBuilder fields = new StringBuilder(); - StringBuilder prikey = new StringBuilder(); - String pfx = "", pfx2 = ""; - for (int i = 0; i < ti.columns.size(); i++) { - fields.append(pfx) - .append(ti.columns.get(i)) - .append(" ") - .append(typemap.get(ti.coltype.get(i))); - if (ti.iskey.get(i)) { - // Primary key column - prikey.append(pfx2).append(ti.columns.get(i)); - pfx2 = ", "; - } - pfx = ", "; - } - if (prikey.length()==0) { - fields.append(pfx).append(MDBC_PRIMARYKEY_NAME) - .append(" ") - .append(MDBC_PRIMARYKEY_TYPE); - prikey.append("mdbc_cuid"); - } - String cql = String.format("CREATE TABLE IF NOT EXISTS %s.%s (%s, PRIMARY KEY (%s));", music_ns, tableName, fields.toString(), prikey.toString()); - executeMusicWriteQuery(cql); - } - - // ************************************************** - // Dirty Tables (in MUSIC) methods - // ************************************************** - - /** - * Create a <i>dirty row</i> table for the real table <i>tableName</i>. The primary keys columns from the real table are recreated in - * the dirty table, along with a "REPLICA__" column that names the replica that should update it's internal state from MUSIC. 
- * @param tableName the table to create a "dirty" table for - */ - @Override - public void createDirtyRowTable(TableInfo ti, String tableName) { - // create dirtybitsTable at all replicas -// for (String repl : allReplicaIds) { -//// String dirtyRowsTableName = "dirty_"+tableName+"_"+allReplicaIds[i]; -//// String dirtyTableQuery = "CREATE TABLE IF NOT EXISTS "+music_ns+"."+ dirtyRowsTableName+" (dirtyRowKeys text PRIMARY KEY);"; -// cql = String.format("CREATE TABLE IF NOT EXISTS %s.DIRTY_%s_%s (dirtyRowKeys TEXT PRIMARY KEY);", music_ns, tableName, repl); -// executeMusicWriteQuery(cql); -// } - StringBuilder ddl = new StringBuilder("REPLICA__ TEXT"); - StringBuilder cols = new StringBuilder("REPLICA__"); - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - // Only use the primary keys columns in the "Dirty" table - ddl.append(", ") - .append(ti.columns.get(i)) - .append(" ") - .append(typemap.get(ti.coltype.get(i))); - cols.append(", ").append(ti.columns.get(i)); - } - } - if(cols.length()==0) { - //fixme - System.err.println("Create dirty row table found no primary key"); - } - ddl.append(", PRIMARY KEY(").append(cols).append(")"); - String cql = String.format("CREATE TABLE IF NOT EXISTS %s.DIRTY_%s (%s);", music_ns, tableName, ddl.toString()); - executeMusicWriteQuery(cql); - } - /** - * Drop the dirty row table for <i>tableName</i> from MUSIC. - * @param tableName the table being dropped - */ - @Override - public void dropDirtyRowTable(String tableName) { - String cql = String.format("DROP TABLE %s.DIRTY_%s;", music_ns, tableName); - executeMusicWriteQuery(cql); - } - /** - * Mark rows as "dirty" in the dirty rows table for <i>tableName</i>. Rows are marked for all replicas but - * this one (this replica already has the up to date data). - * @param tableName the table we are marking dirty - * @param keys an ordered list of the values being put into the table. The values that correspond to the tables' - * primary key are copied into the dirty row table. 
- */ - @Override - public void markDirtyRow(TableInfo ti, String tableName, JSONObject keys) { - Object[] keyObj = getObjects(ti,tableName, keys); - StringBuilder cols = new StringBuilder("REPLICA__"); - PreparedQueryObject pQueryObject = null; - StringBuilder vals = new StringBuilder("?"); - List<Object> vallist = new ArrayList<Object>(); - vallist.add(""); // placeholder for replica - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - cols.append(", ").append(ti.columns.get(i)); - vals.append(", ").append("?"); - vallist.add(keyObj[i]); - } - } - if(cols.length()==0) { - //FIXME - System.err.println("markDIrtyRow need to fix primary key"); - } - String cql = String.format("INSERT INTO %s.DIRTY_%s (%s) VALUES (%s);", music_ns, tableName, cols.toString(), vals.toString()); - /*Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql);*/ - String primaryKey; - if(ti.hasKey()) { - primaryKey = getMusicKeyFromRow(ti,tableName, keys); - } - else { - primaryKey = getMusicKeyFromRowWithoutPrimaryIndexes(ti,tableName, keys); - } - System.out.println("markDirtyRow: PK value: "+primaryKey); - - Object pkObj = null; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - pkObj = keyObj[i]; - } - } - for (String repl : allReplicaIds) { - pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - pQueryObject.addValue(tableName); - pQueryObject.addValue(repl); - pQueryObject.addValue(pkObj); - updateMusicDB(tableName, primaryKey, pQueryObject); - //if (!repl.equals(myId)) { - /*logger.info(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql); - vallist.set(0, repl); - BoundStatement bound = ps.bind(vallist.toArray()); - bound.setReadTimeoutMillis(60000); - synchronized (sess) { - sess.execute(bound); - }*/ - //} - - } - } - /** - * Remove the entries from the dirty row (for this replica) that correspond to a set of primary keys - * @param tableName the table we are removing dirty entries from - * @param keys the primary key values to use in the DELETE. Note: this is *only* the primary keys, not a full table row. - */ - @Override - public void cleanDirtyRow(TableInfo ti, String tableName, JSONObject keys) { - Object[] keysObjects = getObjects(ti,tableName,keys); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - StringBuilder cols = new StringBuilder("REPLICA__=?"); - List<Object> vallist = new ArrayList<Object>(); - vallist.add(myId); - int n = 0; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - cols.append(" AND ").append(ti.columns.get(i)).append("=?"); - vallist.add(keysObjects[n++]); - pQueryObject.addValue(keysObjects[n++]); - } - } - String cql = String.format("DELETE FROM %s.DIRTY_%s WHERE %s;", music_ns, tableName, cols.toString()); - logger.debug(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql); - pQueryObject.appendQueryString(cql); - ReturnType rt = MusicCore.eventualPut(pQueryObject); - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - System.out.println("Failure while cleanDirtyRow..."+rt.getMessage()); - } - /*Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(vallist.toArray()); - bound.setReadTimeoutMillis(60000); - synchronized (sess) { - sess.execute(bound); - }*/ - } - /** - * Get a list of "dirty rows" for a table. 
The dirty rows returned apply only to this replica, - * and consist of a Map of primary key column names and values. - * @param tableName the table we are querying for - * @return a list of maps; each list item is a map of the primary key names and values for that "dirty row". - */ - @Override - public List<Map<String,Object>> getDirtyRows(TableInfo ti, String tableName) { - String cql = String.format("SELECT * FROM %s.DIRTY_%s WHERE REPLICA__=?;", music_ns, tableName); - ResultSet results = null; - logger.debug(EELFLoggerDelegate.applicationLogger,"Executing MUSIC write:"+ cql); - - /*Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(new Object[] { myId }); - bound.setReadTimeoutMillis(60000); - synchronized (sess) { - results = sess.execute(bound); - }*/ - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - try { - results = MusicCore.get(pQueryObject); - } catch (MusicServiceException e) { - - e.printStackTrace(); - } - - ColumnDefinitions cdef = results.getColumnDefinitions(); - List<Map<String,Object>> list = new ArrayList<Map<String,Object>>(); - for (Row row : results) { - Map<String,Object> objs = new HashMap<String,Object>(); - for (int i = 0; i < cdef.size(); i++) { - String colname = cdef.getName(i).toUpperCase(); - String coltype = cdef.getType(i).getName().toString().toUpperCase(); - if (!colname.equals("REPLICA__")) { - switch (coltype) { - case "BIGINT": - objs.put(colname, row.getLong(colname)); - break; - case "BOOLEAN": - objs.put(colname, row.getBool(colname)); - break; - case "BLOB": - objs.put(colname, row.getString(colname)); - break; - case "DATE": - objs.put(colname, row.getString(colname)); - break; - case "DOUBLE": - objs.put(colname, row.getDouble(colname)); - break; - case "DECIMAL": - objs.put(colname, row.getDecimal(colname)); - break; - case "INT": - objs.put(colname, row.getInt(colname)); - break; - case "TIMESTAMP": - objs.put(colname, row.getTimestamp(colname)); - break; - case "VARCHAR": - default: - objs.put(colname, row.getString(colname)); - break; - } - } - } - list.add(objs); - } - return list; - } - - /** - * Drops the named table and its dirty row table (for all replicas) from MUSIC. The dirty row table is dropped first. - * @param tableName This is the table that has been dropped - */ - @Override - public void clearMusicForTable(String tableName) { - dropDirtyRowTable(tableName); - String cql = String.format("DROP TABLE %s.%s;", music_ns, tableName); - executeMusicWriteQuery(cql); - } - /** - * This function is called whenever there is a DELETE to a row on a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. MUSIC propagates - * it to the other replicas. - * - * @param tableName This is the table that has changed. 
- * @param oldRow This is a copy of the old row being deleted - */ - @Override - public void deleteFromEntityTableInMusic(TableInfo ti, String tableName, JSONObject oldRow) { - Object[] objects = getObjects(ti,tableName,oldRow); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - if (ti.hasKey()) { - assert(ti.columns.size() == objects.length); - } else { - assert(ti.columns.size()+1 == objects.length); - } - - StringBuilder where = new StringBuilder(); - List<Object> vallist = new ArrayList<Object>(); - String pfx = ""; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - where.append(pfx) - .append(ti.columns.get(i)) - .append("=?"); - vallist.add(objects[i]); - pQueryObject.addValue(objects[i]); - pfx = " AND "; - } - } - if (!ti.hasKey()) { - where.append(MDBC_PRIMARYKEY_NAME + "=?"); - //\FIXME this is wrong, old row is not going to contain the UUID, this needs to be fixed - vallist.add(UUID.fromString((String) objects[0])); - pQueryObject.addValue(UUID.fromString((String) objects[0])); - } - - String cql = String.format("DELETE FROM %s.%s WHERE %s;", music_ns, tableName, where.toString()); - logger.error(EELFLoggerDelegate.errorLogger,"Executing MUSIC write:"+ cql); - pQueryObject.appendQueryString(cql); - - /*PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(vallist.toArray()); - bound.setReadTimeoutMillis(60000); - Session sess = getMusicSession(); - synchronized (sess) { - sess.execute(bound); - }*/ - String primaryKey = getMusicKeyFromRow(ti,tableName, oldRow); - - updateMusicDB(tableName, primaryKey, pQueryObject); - - // Mark the dirty rows in music for all the replicas but us - markDirtyRow(ti,tableName, oldRow); - } - - public Set<String> getMusicTableSet(String ns) { - Set<String> set = new TreeSet<String>(); - String cql = String.format("SELECT TABLE_NAME FROM SYSTEM_SCHEMA.TABLES WHERE KEYSPACE_NAME = '%s'", ns); - ResultSet rs = executeMusicRead(cql); - for (Row row : rs) { - set.add(row.getString("TABLE_NAME").toUpperCase()); - } - return set; - } - /** - * This method is called whenever there is a SELECT on a local SQL table, wherein it first checks the local - * dirty bits table to see if there are any keys in Cassandra whose value has not yet been sent to SQL - * @param tableName This is the table on which the select is being performed - */ - @Override - public void readDirtyRowsAndUpdateDb(DBInterface dbi, String tableName) { - // Read dirty rows of this table from Music - TableInfo ti = dbi.getTableInfo(tableName); - List<Map<String,Object>> objlist = getDirtyRows(ti,tableName); - PreparedQueryObject pQueryObject = null; - String pre_cql = String.format("SELECT * FROM %s.%s WHERE ", music_ns, tableName); - List<Object> vallist = new ArrayList<Object>(); - StringBuilder sb = new StringBuilder(); - //\TODO Perform a batch operation instead of each row at a time - for (Map<String,Object> map : objlist) { - pQueryObject = new PreparedQueryObject(); - sb.setLength(0); - vallist.clear(); - String pfx = ""; - for (String key : map.keySet()) { - sb.append(pfx).append(key).append("=?"); - vallist.add(map.get(key)); - pQueryObject.addValue(map.get(key)); - pfx = " AND "; - } - - String cql = pre_cql + sb.toString(); - System.out.println("readDirtyRowsAndUpdateDb: cql: "+cql); - pQueryObject.appendQueryString(cql); - ResultSet dirtyRows = null; - try { - //\TODO Why is this an eventual put?, this should be an atomic - dirtyRows = MusicCore.get(pQueryObject); - } catch (MusicServiceException e) { - - 
e.printStackTrace(); - } - /* - Session sess = getMusicSession(); - PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(vallist.toArray()); - bound.setReadTimeoutMillis(60000); - ResultSet dirtyRows = null; - synchronized (sess) { - dirtyRows = sess.execute(bound); - }*/ - List<Row> rows = dirtyRows.all(); - if (rows.isEmpty()) { - // No rows, the row must have been deleted - deleteRowFromSqlDb(dbi,tableName, map); - } else { - for (Row row : rows) { - writeMusicRowToSQLDb(dbi,tableName, row); - } - } - } - } - - private void deleteRowFromSqlDb(DBInterface dbi, String tableName, Map<String, Object> map) { - dbi.deleteRowFromSqlDb(tableName, map); - TableInfo ti = dbi.getTableInfo(tableName); - List<Object> vallist = new ArrayList<Object>(); - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - String col = ti.columns.get(i); - Object val = map.get(col); - vallist.add(val); - } - } - cleanDirtyRow(ti, tableName, new JSONObject(vallist)); - } - /** - * This functions copies the contents of a row in Music into the corresponding row in the SQL table - * @param tableName This is the name of the table in both Music and swl - * @param musicRow This is the row in Music that is being copied into SQL - */ - private void writeMusicRowToSQLDb(DBInterface dbi, String tableName, Row musicRow) { - // First construct the map of columns and their values - TableInfo ti = dbi.getTableInfo(tableName); - Map<String, Object> map = new HashMap<String, Object>(); - List<Object> vallist = new ArrayList<Object>(); - String rowid = tableName; - for (String col : ti.columns) { - Object val = getValue(musicRow, col); - map.put(col, val); - if (ti.iskey(col)) { - vallist.add(val); - rowid += "_" + val.toString(); - } - } - - logger.debug("Blocking rowid: "+rowid); - in_progress.add(rowid); // Block propagation of the following INSERT/UPDATE - - dbi.insertRowIntoSqlDb(tableName, map); - - logger.debug("Unblocking rowid: "+rowid); - in_progress.remove(rowid); // Unblock propagation - -// try { -// String sql = String.format("INSERT INTO %s (%s) VALUES (%s);", tableName, fields.toString(), values.toString()); -// executeSQLWrite(sql); -// } catch (SQLException e) { -// logger.debug("Insert failed because row exists, do an update"); -// // TODO - rewrite this UPDATE command should not update key fields -// String sql = String.format("UPDATE %s SET (%s) = (%s) WHERE %s", tableName, fields.toString(), values.toString(), where.toString()); -// try { -// executeSQLWrite(sql); -// } catch (SQLException e1) { -// e1.printStackTrace(); -// } -// } - - ti = dbi.getTableInfo(tableName); - cleanDirtyRow(ti, tableName, new JSONObject(vallist)); - -// String selectQuery = "select "+ primaryKeyName+" FROM "+tableName+" WHERE "+primaryKeyName+"="+primaryKeyValue+";"; -// java.sql.ResultSet rs = executeSQLRead(selectQuery); -// String dbWriteQuery=null; -// try { -// if(rs.next()){//this entry is there, do an update -// dbWriteQuery = "UPDATE "+tableName+" SET "+columnNameString+" = "+ valueString +"WHERE "+primaryKeyName+"="+primaryKeyValue+";"; -// }else -// dbWriteQuery = "INSERT INTO "+tableName+" VALUES"+valueString+";"; -// executeSQLWrite(dbWriteQuery); -// } catch (SQLException e) { -// // ZZTODO Auto-generated catch block -// e.printStackTrace(); -// } - - //clean the music dirty bits table -// String dirtyRowIdsTableName = music_ns+".DIRTY_"+tableName+"_"+myId; -// String deleteQuery = "DELETE FROM "+dirtyRowIdsTableName+" WHERE dirtyRowKeys=$$"+primaryKeyValue+"$$;"; 
-// executeMusicWriteQuery(deleteQuery); - } - private Object getValue(Row musicRow, String colname) { - ColumnDefinitions cdef = musicRow.getColumnDefinitions(); - DataType colType; - try { - colType= cdef.getType(colname); - } - catch(IllegalArgumentException e) { - logger.warn("Colname is not part of table metadata: "+e); - throw e; - } - String typeStr = colType.getName().toString().toUpperCase(); - switch (typeStr) { - case "BIGINT": - return musicRow.getLong(colname); - case "BOOLEAN": - return musicRow.getBool(colname); - case "BLOB": - return musicRow.getString(colname); - case "DATE": - return musicRow.getString(colname); - case "DECIMAL": - return musicRow.getDecimal(colname); - case "DOUBLE": - return musicRow.getDouble(colname); - case "SMALLINT": - case "INT": - return musicRow.getInt(colname); - case "TIMESTAMP": - return musicRow.getTimestamp(colname); - case "UUID": - return musicRow.getUUID(colname); - default: - logger.error(EELFLoggerDelegate.errorLogger, "UNEXPECTED COLUMN TYPE: columname="+colname+", columntype="+typeStr); - // fall thru - case "VARCHAR": - return musicRow.getString(colname); - } - } - - /** - * This method is called whenever there is an INSERT or UPDATE to a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. Music propagates - * it to the other replicas. - * - * @param tableName This is the table that has changed. - * @param changedRow This is information about the row that has changed - */ - @Override - public void updateDirtyRowAndEntityTableInMusic(TableInfo ti, String tableName, JSONObject changedRow) { - // Build the CQL command - Object[] objects = getObjects(ti,tableName,changedRow); - StringBuilder fields = new StringBuilder(); - StringBuilder values = new StringBuilder(); - String rowid = tableName; - Object[] newrow = new Object[objects.length]; - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - String pfx = ""; - int keyoffset=0; - for (int i = 0; i < objects.length; i++) { - if (!ti.hasKey() && i==0) { - //We need to tack on cassandra's uid in place of a primary key - fields.append(MDBC_PRIMARYKEY_NAME); - values.append("?"); - newrow[i] = UUID.fromString((String) objects[i]); - pQueryObject.addValue(newrow[i]); - keyoffset=-1; - pfx = ", "; - continue; - } - fields.append(pfx).append(ti.columns.get(i+keyoffset)); - values.append(pfx).append("?"); - pfx = ", "; - if (objects[i] instanceof byte[]) { - // Cassandra doesn't seem to have a Codec to translate a byte[] to a ByteBuffer - newrow[i] = ByteBuffer.wrap((byte[]) objects[i]); - pQueryObject.addValue(newrow[i]); - } else if (objects[i] instanceof Reader) { - // Cassandra doesn't seem to have a Codec to translate a Reader to a ByteBuffer either... - newrow[i] = ByteBuffer.wrap(readBytesFromReader((Reader) objects[i])); - pQueryObject.addValue(newrow[i]); - } else { - newrow[i] = objects[i]; - pQueryObject.addValue(newrow[i]); - } - if (i+keyoffset>=0 && ti.iskey.get(i+keyoffset)) { - rowid += "_" + newrow[i].toString(); - } - } - - if (in_progress.contains(rowid)) { - // This call to updateDirtyRowAndEntityTableInMusic() was called as a result of a Cassandra -> H2 update; ignore - logger.debug(EELFLoggerDelegate.applicationLogger, "updateDirtyRowAndEntityTableInMusic: bypassing MUSIC update on "+rowid); - - } else { - // Update local MUSIC node. 
Note: in Cassandra you can insert again on an existing key..it becomes an update - String cql = String.format("INSERT INTO %s.%s (%s) VALUES (%s);", music_ns, tableName, fields.toString(), values.toString()); - - pQueryObject.appendQueryString(cql); - String primaryKey = getMusicKeyFromRow(ti,tableName, changedRow); - updateMusicDB(tableName, primaryKey, pQueryObject); - - /*PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(newrow); - bound.setReadTimeoutMillis(60000); - Session sess = getMusicSession(); - synchronized (sess) { - sess.execute(bound); - }*/ - // Mark the dirty rows in music for all the replicas but us - markDirtyRow(ti,tableName, changedRow); - } - } - - - - private byte[] readBytesFromReader(Reader rdr) { - StringBuilder sb = new StringBuilder(); - try { - int ch; - while ((ch = rdr.read()) >= 0) { - sb.append((char)ch); - } - } catch (IOException e) { - logger.warn("readBytesFromReader: "+e); - } - return sb.toString().getBytes(); - } - - protected PreparedStatement getPreparedStatementFromCache(String cql) { - // Note: have to hope that the Session never changes! - if (!ps_cache.containsKey(cql)) { - Session sess = getMusicSession(); - PreparedStatement ps = sess.prepare(cql); - ps_cache.put(cql, ps); - } - return ps_cache.get(cql); - } - - /** - * This method gets a connection to Music - * @return the Cassandra Session to use - */ - protected Session getMusicSession() { - // create cassandra session - if (musicSession == null) { - logger.info(EELFLoggerDelegate.applicationLogger, "Creating New Music Session"); - mCon = new MusicConnector(musicAddress); - musicSession = mCon.getSession(); - } - return musicSession; - } - - /** - * This method executes a write query in Music - * @param cql the CQL to be sent to Cassandra - */ - protected void executeMusicWriteQuery(String cql) { - logger.debug(EELFLoggerDelegate.applicationLogger, "Executing MUSIC write:"+ cql); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - ReturnType rt = MusicCore.eventualPut(pQueryObject); - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - logger.error(EELFLoggerDelegate.errorLogger, "Failure while eventualPut...: "+rt.getMessage()); - } - /*Session sess = getMusicSession(); - SimpleStatement s = new SimpleStatement(cql); - s.setReadTimeoutMillis(60000); - synchronized (sess) { - sess.execute(s); - }*/ - } - - /** - * This method executes a read query in Music - * @param cql the CQL to be sent to Cassandra - * @return a ResultSet containing the rows returned from the query - */ - protected ResultSet executeMusicRead(String cql) { - logger.debug(EELFLoggerDelegate.applicationLogger, "Executing MUSIC write:"+ cql); - PreparedQueryObject pQueryObject = new PreparedQueryObject(); - pQueryObject.appendQueryString(cql); - ResultSet results = null; - try { - results = MusicCore.get(pQueryObject); - } catch (MusicServiceException e) { - - e.printStackTrace(); - } - return results; - /*Session sess = getMusicSession(); - synchronized (sess) { - return sess.execute(cql); - }*/ - } - - /** - * Returns the default primary key name that this mixin uses - */ - public String getMusicDefaultPrimaryKeyName() { - return MDBC_PRIMARYKEY_NAME; - } - - /** - * Return the function for cassandra's primary key generation - */ - public String generateUniqueKey() { - return DatabaseOperations.generateUniqueKey().toString(); - } - - @Override - public String getMusicKeyFromRowWithoutPrimaryIndexes(TableInfo ti, 
String table, JSONObject dbRow) { - //\TODO this operation is super expensive to perform, both latency and BW - // it is better to add additional where clauses, and have the primary key - // to be composed of known columns of the table - // Adding this primary indexes would be an additional burden to the developers, which spanner - // also does, but otherwise performance is really bad - // At least it should have a set of columns that are guaranteed to be unique - StringBuilder cqlOperation = new StringBuilder(); - cqlOperation.append("SELECT * FROM ") - .append(music_ns) - .append(".") - .append(table); - ResultSet musicResults = executeMusicRead(cqlOperation.toString()); - Object[] dbRowObjects = getObjects(ti,table,dbRow); - while (!musicResults.isExhausted()) { - Row musicRow = musicResults.one(); - if (rowIs(ti, musicRow, dbRowObjects)) { - return ((UUID)getValue(musicRow, MDBC_PRIMARYKEY_NAME)).toString(); - } - } - //should never reach here - return null; - } - - /** - * Checks to see if this row is in list of database entries - * @param ti - * @param musicRow - * @param dbRow - * @return - */ - private boolean rowIs(TableInfo ti, Row musicRow, Object[] dbRow) { - //System.out.println("Comparing " + musicRow.toString()); - boolean sameRow=true; - for (int i=0; i<ti.columns.size(); i++) { - Object val = getValue(musicRow, ti.columns.get(i)); - if (!dbRow[i].equals(val)) { - sameRow=false; - break; - } - } - return sameRow; - } - - @Override - public String getMusicKeyFromRow(TableInfo ti, String tableName, JSONObject row) { - List<String> keyCols = ti.getKeyColumns(); - if(keyCols.isEmpty()){ - throw new IllegalArgumentException("Table doesn't have defined primary indexes "); - } - StringBuilder key = new StringBuilder(); - String pfx = ""; - for(String keyCol: keyCols) { - key.append(pfx); - key.append(row.getString(keyCol)); - pfx = ","; - } - String keyStr = key.toString(); - return keyStr; - } - - public void updateMusicDB(String tableName, String primaryKey, PreparedQueryObject pQObject) { - if(MusicMixin.criticalTables.contains(tableName)) { - ReturnType rt = null; - try { - rt = MusicCore.atomicPut(music_ns, tableName, primaryKey, pQObject, null); - } catch (MusicLockingException e) { - e.printStackTrace(); - } catch (MusicServiceException e) { - e.printStackTrace(); - } catch (MusicQueryException e) { - e.printStackTrace(); - } - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - System.out.println("Failure while critical put..."+rt.getMessage()); - } - } else { - ReturnType rt = MusicCore.eventualPut(pQObject); - if(rt.getResult().getResult().toLowerCase().equals("failure")) { - System.out.println("Failure while critical put..."+rt.getMessage()); - } - } - } - - - private PreparedQueryObject createAppendMtxdIndexToMriQuery(String mriTable, UUID uuid, String table, UUID redoUuid){ - PreparedQueryObject query = new PreparedQueryObject(); - StringBuilder appendBuilder = new StringBuilder(); - appendBuilder.append("UPDATE ") - .append(music_ns) - .append(".") - .append(mriTable) - .append(" SET txredolog = txredolog +[('") - .append(table) - .append("',") - .append(redoUuid) - .append(")] WHERE rangeid = ") - .append(uuid) - .append(";"); - query.appendQueryString(appendBuilder.toString()); - return query; - } - - protected String createAndAssignLock(String fullyQualifiedKey, DatabasePartition partition) throws MDBCServiceException { - MriReference mriIndex = partition.getMusicRangeInformationIndex(); - String lockId; - lockId = 
MusicCore.createLockReference(fullyQualifiedKey); - //\TODO Handle better failures to acquire locks - ReturnType lockReturn; - try { - lockReturn = MusicCore.acquireLock(fullyQualifiedKey,lockId); - } catch (MusicLockingException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Lock was not acquire correctly for key "+fullyQualifiedKey); - throw new MDBCServiceException("Lock was not acquire correctly for key "+fullyQualifiedKey); - } catch (MusicServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Error in music, when locking key: "+fullyQualifiedKey); - throw new MDBCServiceException("Error in music, when locking: "+fullyQualifiedKey); - } catch (MusicQueryException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Error in executing query music, when locking key: "+fullyQualifiedKey); - throw new MDBCServiceException("Error in executing query music, when locking: "+fullyQualifiedKey); - } - //\TODO this is wrong, we should have a better way to obtain a lock forcefully, clean the queue and obtain the lock - if(lockReturn.getResult().compareTo(ResultType.SUCCESS) != 0 ) { - try { - MusicCore.forciblyReleaseLock(fullyQualifiedKey,lockId); - CassaLockStore lockingServiceHandle = MusicCore.getLockingServiceHandle(); - CassaLockStore.LockObject lockOwner = lockingServiceHandle.peekLockQueue(music_ns, partition.getMusicRangeInformationTable(), mriIndex.index.toString()); - while(lockOwner.lockRef != lockId) { - MusicCore.forciblyReleaseLock(fullyQualifiedKey, lockOwner.lockRef); - try { - lockOwner = lockingServiceHandle.peekLockQueue(music_ns, partition.getMusicRangeInformationTable(), mriIndex.index.toString()); - } catch(NullPointerException e){ - //Ignore null pointer exception - lockId = MusicCore.createLockReference(fullyQualifiedKey); - break; - } - } - lockReturn = MusicCore.acquireLock(fullyQualifiedKey,lockId); - - } catch (MusicLockingException e) { - throw new MDBCServiceException("Could not lock the corresponding lock"); - } catch (MusicServiceException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Error in music, when locking key: "+fullyQualifiedKey); - throw new MDBCServiceException("Error in music, when locking: "+fullyQualifiedKey); - } catch (MusicQueryException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Error in executing query music, when locking key: "+fullyQualifiedKey); - throw new MDBCServiceException("Error in executing query music, when locking: "+fullyQualifiedKey); - } - } - if(lockReturn.getResult().compareTo(ResultType.SUCCESS) != 0 ) { - throw new MDBCServiceException("Could not lock the corresponding lock"); - } - //TODO: Java newbie here, verify that this lockId is actually assigned to the global DatabasePartition in the StateManager instance - partition.setLockId(lockId); - return lockId; - } - - - - protected void appendIndexToMri(String lockId, UUID commitId, UUID MriIndex) throws MDBCServiceException{ - PreparedQueryObject appendQuery = createAppendMtxdIndexToMriQuery(musicRangeInformationTableName, MriIndex, musicTxDigestTableName, commitId); - ReturnType returnType = MusicCore.criticalPut(music_ns, musicRangeInformationTableName, MriIndex.toString(), appendQuery, lockId, null); - if(returnType.getResult().compareTo(ResultType.SUCCESS) != 0 ){ - logger.error(EELFLoggerDelegate.errorLogger, "Error when executing append operation with return type: "+returnType.getMessage()); - throw new MDBCServiceException("Error when executing append operation with return type: "+returnType.getMessage()); - } - } - - @Override - 
public void commitLog(DBInterface dbi, DatabasePartition partition, HashMap<Range,StagingTable> transactionDigest, String txId ,TxCommitProgress progressKeeper) throws MDBCServiceException{ - MriReference mriIndex = partition.getMusicRangeInformationIndex(); - if(mriIndex==null) { - //\TODO Fetch MriIndex from the Range Information Table - throw new MDBCServiceException("TIT Index retrieval not yet implemented"); - } - String fullyQualifiedMriKey = music_ns+"."+ mriIndex.table+"."+mriIndex.index.toString(); - //0. See if reference to lock was already created - String lockId = partition.getLockId(); - if(lockId == null || lockId.isEmpty()) { - lockId = createAndAssignLock(fullyQualifiedMriKey,partition); - } - - UUID commitId; - //Generate a local commit id - if(progressKeeper.containsTx(txId)) { - commitId = progressKeeper.getCommitId(txId); - } - else{ - logger.error(EELFLoggerDelegate.errorLogger, "Tx with id "+txId+" was not created in the TxCommitProgress "); - throw new MDBCServiceException("Tx with id "+txId+" was not created in the TxCommitProgress "); - } - //Add creation type of transaction digest - - //1. Push new row to RRT and obtain its index - String serializedTransactionDigest; - try { - serializedTransactionDigest = MDBCUtils.toString(transactionDigest); - } catch (IOException e) { - throw new MDBCServiceException("Failed to serialized transaction digest with error "+e.toString()); - } - MusicTxDigestId digestId = new MusicTxDigestId(commitId); - addTxDigest(musicTxDigestTableName, digestId, serializedTransactionDigest); - //2. Save RRT index to RQ - if(progressKeeper!= null) { - progressKeeper.setRecordId(txId,digestId); - } - //3. Append RRT index into the corresponding TIT row array - appendToRedoLog(mriIndex,partition,digestId); - } - - /** - * @param tableName - * @param string - * @param rowValues - * @return - */ - @SuppressWarnings("unused") - private String getUid(String tableName, String string, Object[] rowValues) { - // - // Update local MUSIC node. 
Note: in Cassandra you can insert again on an existing key..it becomes an update - String cql = String.format("SELECT * FROM %s.%s;", music_ns, tableName); - PreparedStatement ps = getPreparedStatementFromCache(cql); - BoundStatement bound = ps.bind(); - bound.setReadTimeoutMillis(60000); - Session sess = getMusicSession(); - ResultSet rs; - synchronized (sess) { - rs = sess.execute(bound); - } - - //should never reach here - logger.error(EELFLoggerDelegate.errorLogger, "Could not find the row in the primary key"); - return null; - } - - @Override - public Object[] getObjects(TableInfo ti, String tableName, JSONObject row) { - // \FIXME: we may need to add the primary key of the row if it was autogenerated by MUSIC - List<String> cols = ti.columns; - int size = cols.size(); - boolean hasDefault = false; - if(row.has(getMusicDefaultPrimaryKeyName())) { - size++; - hasDefault = true; - } - - Object[] objects = new Object[size]; - int idx = 0; - if(hasDefault) { - objects[idx++] = row.getString(getMusicDefaultPrimaryKeyName()); - } - for(String col : ti.columns) { - objects[idx]=row.get(col); - } - return objects; - } - - @Override - public MusicRangeInformationRow getMusicRangeInformation(DatabasePartition partition) throws MDBCServiceException { - //TODO: verify that lock id is valid before calling the database operations function - MriReference reference = partition.getMusicRangeInformationIndex(); - return DatabaseOperations.getMriRow(music_ns,reference.table,reference.index,partition.getLockId()); - } - - @Override - public DatabasePartition createMusicRangeInformation(MusicRangeInformationRow info) throws MDBCServiceException { - DatabasePartition newPartition = new DatabasePartition(info.partition.ranges,info.index, - musicRangeInformationTableName,null,musicTxDigestTableName); - String fullyQualifiedMriKey = music_ns+"."+ musicRangeInformationTableName+"."+info.index.toString(); - String lockId = createAndAssignLock(fullyQualifiedMriKey,newPartition); - DatabaseOperations.createEmptyMriRow(music_ns,musicRangeInformationTableName,info.metricProcessId,lockId,info.partition.ranges); - throw new UnsupportedOperationException(); - } - - @Override - public void appendToRedoLog(MriReference mriRowId, DatabasePartition partition, MusicTxDigestId newRecord) throws MDBCServiceException { - PreparedQueryObject appendQuery = createAppendMtxdIndexToMriQuery(musicRangeInformationTableName, mriRowId.index, musicTxDigestTableName, newRecord.tablePrimaryKey); - ReturnType returnType = MusicCore.criticalPut(music_ns, musicRangeInformationTableName, mriRowId.index.toString(), appendQuery, partition.getLockId(), null); - if(returnType.getResult().compareTo(ResultType.SUCCESS) != 0 ){ - logger.error(EELFLoggerDelegate.errorLogger, "Error when executing append operation with return type: "+returnType.getMessage()); - throw new MDBCServiceException("Error when executing append operation with return type: "+returnType.getMessage()); - } - } - - @Override - public void addTxDigest(String musicTxDigestTable, MusicTxDigestId newId, String transactionDigest) throws MDBCServiceException { - DatabaseOperations.createTxDigestRow(music_ns,musicTxDigestTable,newId,transactionDigest); - } - - @Override - public PartitionInformation getPartitionInformation(DatabasePartition partition) throws MDBCServiceException { - //\TODO We may want to cache this information to avoid going to the database to obtain this simple information - MusicRangeInformationRow row = getMusicRangeInformation(partition); - return row.partition; - 
} - - @Override - public HashMap<Range,StagingTable> getTransactionDigest(MusicTxDigestId id) throws MDBCServiceException { - return DatabaseOperations.getTransactionDigest(music_ns, musicTxDigestTableName, id); - } - - @Override - public void own(List<Range> ranges){ - throw new UnsupportedOperationException(); - } - - @Override - public void appendRange(String rangeId, List<Range> ranges){ - throw new UnsupportedOperationException(); - } - - @Override - public void relinquish(String ownerId, String rangeId){ - throw new UnsupportedOperationException(); - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/DBInterface.java b/src/main/java/org/onap/music/mdbc/mixins/DBInterface.java deleted file mode 100755 index 2ff88a2..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/DBInterface.java +++ /dev/null @@ -1,92 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.sql.ResultSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.onap.music.mdbc.Range; -import org.onap.music.mdbc.TableInfo; -import org.onap.music.mdbc.tables.StagingTable; - -/** - * This Interface defines the methods that MDBC needs in order to mirror data to/from a Database instance. - * - * @author Robert P. Eby - */ -public interface DBInterface { - /** - * Get the name of this DBnterface mixin object. - * @return the name - */ - String getMixinName(); - /** - * Do what is needed to close down the database connection. - */ - void close(); - /** - * Get a set of the table names in the database. The table names should be returned in UPPER CASE. - * @return the set - */ - Set<String> getSQLTableSet(); - /** - * Return the name of the database that the driver is connected to - * @return - */ - String getDatabaseName(); - /** - * Return a TableInfo object for the specified table. - * @param tableName the table to look up - * @return a TableInfo object containing the info we need, or null if the table does not exist - */ - TableInfo getTableInfo(String tableName); - /** - * This method should create triggers in the database to be called for each row after every INSERT, - * UPDATE and DELETE, and before every SELECT. - * @param tableName this is the table on which triggers are being created. - */ - void createSQLTriggers(String tableName); - /** - * This method should drop all triggers previously created in the database for the table. - * @param tableName this is the table on which triggers are being dropped. - */ - void dropSQLTriggers(String tableName); - /** - * This method inserts a row into the SQL database, defined via a map of column names and values. - * @param tableName the table to insert the row into - * @param map map of column names → values to use for the keys when inserting the row - */ - void insertRowIntoSqlDb(String tableName, Map<String, Object> map); - /** - * This method deletes a row from the SQL database, defined via a map of column names and values. - * @param tableName the table to delete the row from - * @param map map of column names → values to use for the keys when deleting the row - */ - void deleteRowFromSqlDb(String tableName, Map<String, Object> map); - /** - * Code to be run within the DB driver before a SQL statement is executed. This is where tables - * can be synchronized before a SELECT, for those databases that do not support SELECT triggers. - * @param sql the SQL statement that is about to be executed - */ - void preStatementHook(final String sql); - /** - * Code to be run within the DB driver after a SQL statement has been executed. 
This is where remote - * statement actions can be copied back to Cassandra/MUSIC. - * @param sql the SQL statement that was executed - * @param transactionDigest - */ - void postStatementHook(final String sql,Map<Range,StagingTable> transactionDigest); - /** - * This method executes a read query in the SQL database. Methods that call this method should be sure - * to call resultset.getStatement().close() when done in order to free up resources. - * @param sql the query to run - * @return a ResultSet containing the rows returned from the query - */ - ResultSet executeSQLRead(String sql); - - void synchronizeData(String tableName); - - List<String> getReservedTblNames(); - - String getPrimaryKey(String sql, String tableName); -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/MixinFactory.java b/src/main/java/org/onap/music/mdbc/mixins/MixinFactory.java deleted file mode 100755 index c0c6a64..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/MixinFactory.java +++ /dev/null @@ -1,125 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.sql.Connection; -import java.util.Properties; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.DatabasePartition; -import org.onap.music.mdbc.MusicSqlManager; - -/** - * This class is used to construct instances of Mixins that implement either the {@link org.onap.music.mdbc.mixins.DBInterface} - * interface, or the {@link org.onap.music.mdbc.mixins.MusicInterface} interface. The Mixins are searched for in the CLASSPATH. - * - * @author Robert P. Eby - */ -public class MixinFactory { - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MixinFactory.class); - - // Only static methods... - private MixinFactory(){} - - /** - * Look for a class in CLASSPATH that implements the {@link DBInterface} interface, and has the mixin name <i>name</i>. - * If one is found, construct and return it, using the other arguments for the constructor. - * @param name the name of the Mixin - * @param msm the MusicSqlManager to use as an argument to the constructor - * @param url the URL to use as an argument to the constructor - * @param conn the underlying JDBC Connection - * @param info the Properties to use as an argument to the constructor - * @return the newly constructed DBInterface, or null if one cannot be found. - */ - public static DBInterface createDBInterface(String name, MusicSqlManager msm, String url, Connection conn, Properties info) { - for (Class<?> cl : Utils.getClassesImplementing(DBInterface.class)) { - try { - Constructor<?> con = cl.getConstructor(); - if (con != null) { - DBInterface dbi = (DBInterface) con.newInstance(); - String miname = dbi.getMixinName(); - logger.info(EELFLoggerDelegate.applicationLogger,"Checking "+miname); - if (miname.equalsIgnoreCase(name)) { - con = cl.getConstructor(MusicSqlManager.class, String.class, Connection.class, Properties.class); - if (con != null) { - logger.info(EELFLoggerDelegate.applicationLogger,"Found match: "+miname); - return (DBInterface) con.newInstance(msm, url, conn, info); - } - } - } - } catch (Exception e) { - logger.error(EELFLoggerDelegate.errorLogger,"createDBInterface: "+e); - } - } - return null; - } - /** - * Look for a class in CLASSPATH that implements the {@link MusicInterface} interface, and has the mixin name <i>name</i>. - * If one is found, construct and return it, using the other arguments for the constructor. 
- * @param name the name of the Mixin - * @param msm the MusicSqlManager to use as an argument to the constructor - * @param dbi the DBInterface to use as an argument to the constructor - * @param url the URL to use as an argument to the constructor - * @param info the Properties to use as an argument to the constructor - * @return the newly constructed MusicInterface, or null if one cannot be found. - */ - public static MusicInterface createMusicInterface(String name, String url, Properties info) { - for (Class<?> cl : Utils.getClassesImplementing(MusicInterface.class)) { - try { - Constructor<?> con = cl.getConstructor(); - if (con != null) { //TODO: is this necessary? Don't think it could ever be null? - MusicInterface mi = (MusicInterface) con.newInstance(); - String miname = mi.getMixinName(); - logger.info(EELFLoggerDelegate.applicationLogger, "Checking "+miname); - if (miname.equalsIgnoreCase(name)) { - con = cl.getConstructor(String.class, Properties.class); - if (con != null) { - logger.info(EELFLoggerDelegate.applicationLogger,"Found match: "+miname); - return (MusicInterface) con.newInstance(url, info); - } - } - } - } catch (InvocationTargetException e) { - logger.error(EELFLoggerDelegate.errorLogger,"createMusicInterface: "+e.getCause().toString()); - } - catch (Exception e) { - logger.error(EELFLoggerDelegate.errorLogger,"createMusicInterface: "+e); - } - } - return null; - } - - // Unfortunately, this version does not work when MDBC is built as a JBoss module, - // where something funny is happening with the classloaders -// @SuppressWarnings("unused") -// private static List<Class<?>> getClassesImplementingOld(Class<?> implx) { -// List<Class<?>> list = new ArrayList<Class<?>>(); -// try { -// ClassLoader cldr = MixinFactory.class.getClassLoader(); -// while (cldr != null) { -// ClassPath cp = ClassPath.from(cldr); -// for (ClassPath.ClassInfo x : cp.getAllClasses()) { -// if (x.toString().startsWith("com.att.")) { // mixins must have a package starting with com.att. -// Class<?> cl = x.load(); -// if (impl(cl, implx)) { -// list.add(cl); -// } -// } -// } -// cldr = cldr.getParent(); -// } -// } catch (IOException e) { -// // ignore -// } -// return list; -// } - static boolean impl(Class<?> cl, Class<?> imp) { - for (Class<?> c2 : cl.getInterfaces()) { - if (c2 == imp) { - return true; - } - } - Class<?> c2 = cl.getSuperclass(); - return (c2 != null) ? impl(c2, imp) : false; - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/MusicConnector.java b/src/main/java/org/onap/music/mdbc/mixins/MusicConnector.java deleted file mode 100755 index 11322fe..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/MusicConnector.java +++ /dev/null @@ -1,124 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.net.InetAddress; -import java.net.NetworkInterface; -import java.net.SocketException; -import java.util.ArrayList; -import java.util.Enumeration; -import java.util.Iterator; -import java.util.List; - -import org.onap.music.logging.EELFLoggerDelegate; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.HostDistance; -import com.datastax.driver.core.Metadata; -import com.datastax.driver.core.PoolingOptions; -import com.datastax.driver.core.Session; -import com.datastax.driver.core.exceptions.NoHostAvailableException; -import org.onap.music.main.MusicCore; - -/** - * This class allows for management of the Cassandra Cluster and Session objects. - * - * @author Robert P. 
Eby - */ -public class MusicConnector { - - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MusicConnector.class); - - private Session session; - private Cluster cluster; - - protected MusicConnector() { - //to defeat instantiation since this is a singleton - } - - public MusicConnector(String address) { -// connectToCassaCluster(address); - connectToMultipleAddresses(address); - } - - public Session getSession() { - return session; - } - - public void close() { - if (session != null) - session.close(); - session = null; - if (cluster != null) - cluster.close(); - cluster = null; - } - - private List<String> getAllPossibleLocalIps(){ - ArrayList<String> allPossibleIps = new ArrayList<String>(); - try { - Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); - while(en.hasMoreElements()){ - NetworkInterface ni=(NetworkInterface) en.nextElement(); - Enumeration<InetAddress> ee = ni.getInetAddresses(); - while(ee.hasMoreElements()) { - InetAddress ia= (InetAddress) ee.nextElement(); - allPossibleIps.add(ia.getHostAddress()); - } - } - } catch (SocketException e) { - e.printStackTrace(); - } - return allPossibleIps; - } - - private void connectToMultipleAddresses(String address) { - MusicCore.getDSHandle(address); - /* - PoolingOptions poolingOptions = - new PoolingOptions() - .setConnectionsPerHost(HostDistance.LOCAL, 4, 10) - .setConnectionsPerHost(HostDistance.REMOTE, 2, 4); - String[] music_hosts = address.split(","); - if (cluster == null) { - logger.info(EELFLoggerDelegate.applicationLogger,"Initializing MUSIC Client with endpoints "+address); - cluster = Cluster.builder() - .withPort(9042) - .withPoolingOptions(poolingOptions) - .withoutMetrics() - .addContactPoints(music_hosts) - .build(); - Metadata metadata = cluster.getMetadata(); - logger.info(EELFLoggerDelegate.applicationLogger,"Connected to cluster:"+metadata.getClusterName()+" at address:"+address); - - } - session = cluster.connect(); - */ - } - - @SuppressWarnings("unused") - private void connectToCassaCluster(String address) { - PoolingOptions poolingOptions = - new PoolingOptions() - .setConnectionsPerHost(HostDistance.LOCAL, 4, 10) - .setConnectionsPerHost(HostDistance.REMOTE, 2, 4); - Iterator<String> it = getAllPossibleLocalIps().iterator(); - logger.info(EELFLoggerDelegate.applicationLogger,"Iterating through possible ips:"+getAllPossibleLocalIps()); - - while (it.hasNext()) { - try { - cluster = Cluster.builder() - .withPort(9042) - .withPoolingOptions(poolingOptions) - .withoutMetrics() - .addContactPoint(address) - .build(); - //cluster.getConfiguration().getSocketOptions().setReadTimeoutMillis(Integer.MAX_VALUE); - Metadata metadata = cluster.getMetadata(); - logger.info(EELFLoggerDelegate.applicationLogger,"Connected to cluster:"+metadata.getClusterName()+" at address:"+address); - - session = cluster.connect(); - break; - } catch (NoHostAvailableException e) { - address = it.next(); - } - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/MusicInterface.java b/src/main/java/org/onap/music/mdbc/mixins/MusicInterface.java deleted file mode 100755 index 35cfb00..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/MusicInterface.java +++ /dev/null @@ -1,173 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -import org.json.JSONObject; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.mdbc.DatabasePartition; -import 
org.onap.music.mdbc.Range; -import org.onap.music.mdbc.TableInfo; -import org.onap.music.mdbc.tables.PartitionInformation; -import org.onap.music.mdbc.tables.MusicTxDigestId; -import org.onap.music.mdbc.tables.StagingTable; -import org.onap.music.mdbc.tables.MriReference; -import org.onap.music.mdbc.tables.MusicRangeInformationRow; -import org.onap.music.mdbc.tables.TxCommitProgress; - -/** - * This Interface defines the methods that MDBC needs for a class to provide access to the persistence layer of MUSIC. - * - * @author Robert P. Eby - */ -public interface MusicInterface { - /** - * This function is used to created all the required data structures, both local - * \TODO Check if this function is required in the MUSIC interface or could be just created on the constructor - */ - void initializeMetricDataStructures() throws MDBCServiceException; - /** - * Get the name of this MusicInterface mixin object. - * @return the name - */ - String getMixinName(); - /** - * Gets the name of this MusicInterface mixin's default primary key name - * @return default primary key name - */ - String getMusicDefaultPrimaryKeyName(); - /** - * generates a key or placeholder for what is required for a primary key - * @return a primary key - */ - String generateUniqueKey(); - - /** - * Find the key used with Music for a table that was created without a primary index - * Name is long to avoid developers using it. For cassandra performance in this operation - * is going to be really bad - * @param ti information of the table in the SQL layer - * @param table name of the table - * @param dbRow row obtained from the SQL layer - * @return key associated with the row - */ - String getMusicKeyFromRowWithoutPrimaryIndexes(TableInfo ti, String table, JSONObject dbRow); - /** - * Do what is needed to close down the MUSIC connection. - */ - void close(); - /** - * This method creates a keyspace in Music/Cassandra to store the data corresponding to the SQL tables. - * The keyspace name comes from the initialization properties passed to the JDBC driver. - */ - void createKeyspace(); - /** - * This method performs all necessary initialization in Music/Cassandra to store the table <i>tableName</i>. - * @param tableName the table to initialize MUSIC for - */ - void initializeMusicForTable(TableInfo ti, String tableName); - /** - * Create a <i>dirty row</i> table for the real table <i>tableName</i>. The primary keys columns from the real table are recreated in - * the dirty table, along with a "REPLICA__" column that names the replica that should update it's internal state from MUSIC. - * @param tableName the table to create a "dirty" table for - */ - void createDirtyRowTable(TableInfo ti, String tableName); - /** - * Drop the dirty row table for <i>tableName</i> from MUSIC. - * @param tableName the table being dropped - */ - void dropDirtyRowTable(String tableName); - /** - * Drops the named table and its dirty row table (for all replicas) from MUSIC. The dirty row table is dropped first. - * @param tableName This is the table that has been dropped - */ - void clearMusicForTable(String tableName); - /** - * Mark rows as "dirty" in the dirty rows table for <i>tableName</i>. Rows are marked for all replicas but - * this one (this replica already has the up to date data). - * @param tableName the table we are marking dirty - * @param keys an ordered list of the values being put into the table. The values that correspond to the tables' - * primary key are copied into the dirty row table. 
- */ - void markDirtyRow(TableInfo ti, String tableName, JSONObject keys); - /** - * Remove the entries from the dirty row (for this replica) that correspond to a set of primary keys - * @param tableName the table we are removing dirty entries from - * @param keys the primary key values to use in the DELETE. Note: this is *only* the primary keys, not a full table row. - */ - void cleanDirtyRow(TableInfo ti, String tableName, JSONObject keys); - /** - * Get a list of "dirty rows" for a table. The dirty rows returned apply only to this replica, - * and consist of a Map of primary key column names and values. - * @param tableName the table we are querying for - * @return a list of maps; each list item is a map of the primary key names and values for that "dirty row". - */ - List<Map<String,Object>> getDirtyRows(TableInfo ti, String tableName); - /** - * This method is called whenever there is a DELETE to a row on a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. MUSIC propagates - * it to the other replicas. - * @param tableName This is the table that has changed. - * @param oldRow This is a copy of the old row being deleted - */ - void deleteFromEntityTableInMusic(TableInfo ti,String tableName, JSONObject oldRow); - /** - * This method is called whenever there is a SELECT on a local SQL table, wherein it first checks the local - * dirty bits table to see if there are any rows in Cassandra whose value needs to be copied to the local SQL DB. - * @param tableName This is the table on which the select is being performed - */ - void readDirtyRowsAndUpdateDb(DBInterface dbi, String tableName); - /** - * This method is called whenever there is an INSERT or UPDATE to a local SQL table, wherein it updates the - * MUSIC/Cassandra tables (both dirty bits and actual data) corresponding to the SQL write. Music propagates - * it to the other replicas. - * @param tableName This is the table that has changed. - * @param changedRow This is information about the row that has changed - */ - void updateDirtyRowAndEntityTableInMusic(TableInfo ti, String tableName, JSONObject changedRow); - - Object[] getObjects(TableInfo ti, String tableName, JSONObject row); - /** - * Returns the primary key associated with the given row - * @param ti info of the table that is associated with the row - * @param tableName name of the table that contains the row - * @param changedRow row that is going to contain the information associated with the primary key - * @return primary key of the row - */ - String getMusicKeyFromRow(TableInfo ti, String tableName, JSONObject changedRow); - - /** - * Commits the corresponding REDO-log into MUSIC - * - * @param dbi, the database interface use in the local SQL cache, where the music interface is being used - * @param partition - * @param transactionDigest digest of the transaction that is being committed into the Redo log in music. 
It has to be a HashMap, because it is required to be serializable - * @param txId id associated with the log being send - * @param progressKeeper data structure that is used to handle to detect failures, and know what to do - * @throws MDBCServiceException - */ - void commitLog(DBInterface dbi, DatabasePartition partition, HashMap<Range,StagingTable> transactionDigest, String txId,TxCommitProgress progressKeeper) throws MDBCServiceException; - - MusicRangeInformationRow getMusicRangeInformation(DatabasePartition partition) throws MDBCServiceException; - - DatabasePartition createMusicRangeInformation(MusicRangeInformationRow info) throws MDBCServiceException; - - void appendToRedoLog(MriReference mriRowId, DatabasePartition partition, MusicTxDigestId newRecord) throws MDBCServiceException; - - void addTxDigest(String musicTxDigestTable, MusicTxDigestId newId, String transactionDigest) throws MDBCServiceException; - - PartitionInformation getPartitionInformation(DatabasePartition partition) throws MDBCServiceException; - - HashMap<Range,StagingTable> getTransactionDigest(MusicTxDigestId id) throws MDBCServiceException; - - void own(List<Range> ranges); - - void appendRange(String rangeId, List<Range> ranges); - - void relinquish(String ownerId, String rangeId); - -} - diff --git a/src/main/java/org/onap/music/mdbc/mixins/MusicMixin.java b/src/main/java/org/onap/music/mdbc/mixins/MusicMixin.java deleted file mode 100644 index 58ed35c..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/MusicMixin.java +++ /dev/null @@ -1,233 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.io.IOException; -import java.io.InputStream; -import java.util.*; - -import org.onap.music.mdbc.LockId; -import org.json.JSONObject; -import org.onap.music.exceptions.MusicLockingException; - -import org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.mdbc.DatabasePartition; -import org.onap.music.mdbc.Range; -import org.onap.music.mdbc.TableInfo; -import org.onap.music.mdbc.tables.PartitionInformation; -import org.onap.music.mdbc.tables.MusicTxDigestId; -import org.onap.music.mdbc.tables.StagingTable; -import org.onap.music.mdbc.tables.MriReference; -import org.onap.music.mdbc.tables.MusicRangeInformationRow; -import org.onap.music.mdbc.tables.TxCommitProgress; - -import org.onap.music.main.MusicCore; - -/** - - * - */ -public class MusicMixin implements MusicInterface { - - public static Map<Integer, Set<String>> currentLockMap = new HashMap<>(); - public static List<String> criticalTables = new ArrayList<>(); - - @Override - public String getMixinName() { - // - return null; - } - - @Override - public String getMusicDefaultPrimaryKeyName() { - // - return null; - } - - @Override - public String generateUniqueKey() { - // - return null; - } - - @Override - public String getMusicKeyFromRow(TableInfo ti, String table, JSONObject dbRow) { - // - return null; - } - - @Override - public void close() { - // - - } - - @Override - public void createKeyspace() { - // - - } - - @Override - public void initializeMusicForTable(TableInfo ti, String tableName) { - // - - } - - @Override - public void createDirtyRowTable(TableInfo ti, String tableName) { - // - - } - - @Override - public void dropDirtyRowTable(String tableName) { - // - - } - - @Override - public void clearMusicForTable(String tableName) { - // - - } - - @Override - public void markDirtyRow(TableInfo ti, String tableName, JSONObject keys) { - // - - } - - @Override - public void cleanDirtyRow(TableInfo ti, String tableName, JSONObject 
keys) { - // - - } - - @Override - public List<Map<String, Object>> getDirtyRows(TableInfo ti, String tableName) { - // - return null; - } - - @Override - public void deleteFromEntityTableInMusic(TableInfo ti, String tableName, JSONObject oldRow) { - // - - } - - @Override - public void readDirtyRowsAndUpdateDb(DBInterface dbi, String tableName) { - // - - } - - @Override - public void updateDirtyRowAndEntityTableInMusic(TableInfo ti, String tableName, JSONObject changedRow) { - updateDirtyRowAndEntityTableInMusic(tableName, changedRow, false); - - } - - public void updateDirtyRowAndEntityTableInMusic(String tableName, JSONObject changedRow, boolean isCritical) { - } - - - public static void loadProperties() { - Properties prop = new Properties(); - InputStream input = null; - try { - input = MusicMixin.class.getClassLoader().getResourceAsStream("mdbc.properties"); - prop.load(input); - String crTable = prop.getProperty("critical.tables"); - String[] tableArr = crTable.split(","); - criticalTables = Arrays.asList(tableArr); - - } catch (Exception ex) { - ex.printStackTrace(); - } finally { - if (input != null) { - try { - input.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - } - - public static void releaseZKLocks(Set<LockId> lockIds) { - for (LockId lockId : lockIds) { - System.out.println("Releasing lock: " + lockId); - try { - MusicCore.voluntaryReleaseLock(lockId.getFullyQualifiedLockKey(), lockId.getLockReference()); - MusicCore.destroyLockRef(lockId.getFullyQualifiedLockKey(), lockId.getLockReference()); - } catch (MusicLockingException e) { - e.printStackTrace(); - } - } - } - - @Override - public String getMusicKeyFromRowWithoutPrimaryIndexes(TableInfo ti, String tableName, JSONObject changedRow) { - // - return null; - } - - @Override - public void initializeMetricDataStructures() { - // - - } - - @Override - public Object[] getObjects(TableInfo ti, String tableName, JSONObject row) { - return null; - } - - @Override - public void commitLog(DBInterface dbi, DatabasePartition partition, HashMap<Range, StagingTable> transactionDigest, String txId, TxCommitProgress progressKeeper) - throws MDBCServiceException { - // TODO Auto-generated method stub - } - - @Override - public HashMap<Range, StagingTable> getTransactionDigest(MusicTxDigestId id) { - return null; - } - - @Override - public PartitionInformation getPartitionInformation(DatabasePartition partition) { - return null; - } - - @Override - public DatabasePartition createMusicRangeInformation(MusicRangeInformationRow info) { - return null; - } - - @Override - public void appendToRedoLog(MriReference mriRowId, DatabasePartition partition, MusicTxDigestId newRecord) { - } - - @Override - public void addTxDigest(String musicTxDigestTable, MusicTxDigestId newId, String transactionDigest) { - } - - @Override - public void own(List<Range> ranges) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - @Override - public void appendRange(String rangeId, List<Range> ranges) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - @Override - public void relinquish(String ownerId, String rangeId) { - throw new java.lang.UnsupportedOperationException("function not implemented yet"); - } - - @Override - public MusicRangeInformationRow getMusicRangeInformation(DatabasePartition partition){ - return null; - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/MySQLMixin.java b/src/main/java/org/onap/music/mdbc/mixins/MySQLMixin.java 
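The MusicInterface contract and the MusicMixin stub deleted above together describe MDBC's replication cycle: a SQL write updates the MUSIC entity table and marks the row dirty for other replicas, a SQL read first drains dirty rows back into the local database, and a commit pushes the per-range transaction digest into the MUSIC redo log. The sketch below shows how a caller might drive those three calls; the mi, dbi, ti, partition and progress objects are assumed to be supplied by the surrounding MDBC machinery, and the flow is illustrative rather than the project's actual call sequence.

// Illustrative sketch only. All parameter types (MusicInterface, DBInterface, TableInfo,
// DatabasePartition, Range, StagingTable, TxCommitProgress, MDBCServiceException) are the
// MDBC classes that appear elsewhere in this diff.
class MusicInterfaceUsageSketch {

    void onSqlWrite(MusicInterface mi, TableInfo ti, String table, JSONObject changedRow) {
        // Propagate an INSERT/UPDATE to Cassandra/MUSIC and flag the row dirty for other replicas.
        mi.updateDirtyRowAndEntityTableInMusic(ti, table, changedRow);
    }

    void onSqlRead(MusicInterface mi, DBInterface dbi, String table) {
        // Before serving a SELECT, copy rows that other replicas marked dirty into the local SQL DB.
        mi.readDirtyRowsAndUpdateDb(dbi, table);
    }

    void onCommit(MusicInterface mi, DBInterface dbi, DatabasePartition partition,
                  HashMap<Range, StagingTable> digest, String txId, TxCommitProgress progress)
            throws MDBCServiceException {
        // Push the staged transaction digest for this partition into the MUSIC redo log.
        mi.commitLog(dbi, partition, digest, txId, progress);
    }
}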
deleted file mode 100755 index d78bc9b..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/MySQLMixin.java +++ /dev/null @@ -1,786 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Types; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.TreeSet; - -import org.json.JSONObject; -import org.json.JSONTokener; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.MusicSqlManager; -import org.onap.music.mdbc.Range; -import org.onap.music.mdbc.TableInfo; -import org.onap.music.mdbc.tables.OperationType; -import org.onap.music.mdbc.tables.StagingTable; - -import net.sf.jsqlparser.JSQLParserException; -import net.sf.jsqlparser.parser.CCJSqlParserUtil; -import net.sf.jsqlparser.statement.delete.Delete; -import net.sf.jsqlparser.statement.insert.Insert; -import net.sf.jsqlparser.statement.update.Update; - -/** - * This class provides the methods that MDBC needs in order to mirror data to/from a - * <a href="https://dev.mysql.com/">MySQL</a> or <a href="http://mariadb.org/">MariaDB</a> database instance. - * This class uses the <code>JSON_OBJECT()</code> database function, which means it requires the following - * minimum versions of either database: - * <table summary=""> - * <tr><th>DATABASE</th><th>VERSION</th></tr> - * <tr><td>MySQL</td><td>5.7.8</td></tr> - * <tr><td>MariaDB</td><td>10.2.3 (Note: 10.2.3 is currently (July 2017) a <i>beta</i> release)</td></tr> - * </table> - * - * @author Robert P. Eby - */ -public class MySQLMixin implements DBInterface { - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(MySQLMixin.class); - - public static final String MIXIN_NAME = "mysql"; - public static final String TRANS_TBL = "MDBC_TRANSLOG"; - private static final String CREATE_TBL_SQL = - "CREATE TABLE IF NOT EXISTS "+TRANS_TBL+ - " (IX INT AUTO_INCREMENT, OP CHAR(1), TABLENAME VARCHAR(255), NEWROWDATA VARCHAR(1024), KEYDATA VARCHAR(1024), CONNECTION_ID INT,PRIMARY KEY (IX))"; - - private final MusicSqlManager msm; - private final int connId; - private final String dbName; - private final Connection dbConnection; - private final Map<String, TableInfo> tables; - private boolean server_tbl_created = false; - - public MySQLMixin() { - this.msm = null; - this.connId = 0; - this.dbName = null; - this.dbConnection = null; - this.tables = null; - } - public MySQLMixin(MusicSqlManager msm, String url, Connection conn, Properties info) { - this.msm = msm; - this.connId = generateConnID(conn); - this.dbName = getDBName(conn); - this.dbConnection = conn; - this.tables = new HashMap<String, TableInfo>(); - } - // This is used to generate a unique connId for this connection to the DB. - private int generateConnID(Connection conn) { - int rv = (int) System.currentTimeMillis(); // random-ish - try { - Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("SELECT CONNECTION_ID() AS IX"); - if (rs.next()) { - rv = rs.getInt("IX"); - } - stmt.close(); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"generateConnID: problem generating a connection ID!"); - } - return rv; - } - - /** - * Get the name of this DBnterface mixin object. 
- * @return the name - */ - @Override - public String getMixinName() { - return MIXIN_NAME; - } - - @Override - public void close() { - // nothing yet - } - - /** - * Determines the db name associated with the connection - * This is the private/internal method that actually determines the name - * @param conn - * @return - */ - private String getDBName(Connection conn) { - String dbname = "mdbc"; //default name - try { - Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("SELECT DATABASE() AS DB"); - if (rs.next()) { - dbname = rs.getString("DB"); - } - stmt.close(); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger, "getDBName: problem getting database name from mysql"); - } - return dbname; - } - - public String getDatabaseName() { - return this.dbName; - } - /** - * Get a set of the table names in the database. - * @return the set - */ - @Override - public Set<String> getSQLTableSet() { - Set<String> set = new TreeSet<String>(); - String sql = "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA=DATABASE() AND TABLE_TYPE='BASE TABLE'"; - try { - Statement stmt = dbConnection.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - while (rs.next()) { - String s = rs.getString("TABLE_NAME"); - set.add(s); - } - stmt.close(); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"getSQLTableSet: "+e); - } - logger.debug(EELFLoggerDelegate.applicationLogger,"getSQLTableSet returning: "+ set); - return set; - } -/* -mysql> describe tables; -+-----------------+---------------------+------+-----+---------+-------+ -| Field | Type | Null | Key | Default | Extra | -+-----------------+---------------------+------+-----+---------+-------+ -| TABLE_CATALOG | varchar(512) | NO | | | | -| TABLE_SCHEMA | varchar(64) | NO | | | | -| TABLE_NAME | varchar(64) | NO | | | | -| TABLE_TYPE | varchar(64) | NO | | | | -| ENGINE | varchar(64) | YES | | NULL | | -| VERSION | bigint(21) unsigned | YES | | NULL | | -| ROW_FORMAT | varchar(10) | YES | | NULL | | -| TABLE_ROWS | bigint(21) unsigned | YES | | NULL | | -| AVG_ROW_LENGTH | bigint(21) unsigned | YES | | NULL | | -| DATA_LENGTH | bigint(21) unsigned | YES | | NULL | | -| MAX_DATA_LENGTH | bigint(21) unsigned | YES | | NULL | | -| INDEX_LENGTH | bigint(21) unsigned | YES | | NULL | | -| DATA_FREE | bigint(21) unsigned | YES | | NULL | | -| AUTO_INCREMENT | bigint(21) unsigned | YES | | NULL | | -| CREATE_TIME | datetime | YES | | NULL | | -| UPDATE_TIME | datetime | YES | | NULL | | -| CHECK_TIME | datetime | YES | | NULL | | -| TABLE_COLLATION | varchar(32) | YES | | NULL | | -| CHECKSUM | bigint(21) unsigned | YES | | NULL | | -| CREATE_OPTIONS | varchar(255) | YES | | NULL | | -| TABLE_COMMENT | varchar(2048) | NO | | | | -+-----------------+---------------------+------+-----+---------+-------+ - */ - /** - * Return a TableInfo object for the specified table. - * This method first looks in a cache of previously constructed TableInfo objects for the table. - * If not found, it queries the INFORMATION_SCHEMA.COLUMNS table to obtain the column names, types, and indexes of the table. - * It creates a new TableInfo object with the results. 
- * @param tableName the table to look up - * @return a TableInfo object containing the info we need, or null if the table does not exist - */ - @Override - public TableInfo getTableInfo(String tableName) { - TableInfo ti = tables.get(tableName); - if (ti == null) { - try { - String tbl = tableName;//.toUpperCase(); - String sql = "SELECT COLUMN_NAME, DATA_TYPE, COLUMN_KEY FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND TABLE_NAME='"+tbl+"'"; - ResultSet rs = executeSQLRead(sql); - if (rs != null) { - ti = new TableInfo(); - while (rs.next()) { - String name = rs.getString("COLUMN_NAME"); - String type = rs.getString("DATA_TYPE"); - String ckey = rs.getString("COLUMN_KEY"); - ti.columns.add(name); - ti.coltype.add(mapDatatypeNameToType(type)); - ti.iskey.add(ckey != null && !ckey.equals("")); - } - rs.getStatement().close(); - } else { - logger.error(EELFLoggerDelegate.errorLogger,"Cannot retrieve table info for table "+tableName+" from MySQL."); - } - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"Cannot retrieve table info for table "+tableName+" from MySQL: "+e); - return null; - } - tables.put(tableName, ti); - } - return ti; - } - // Map MySQL data type names to the java.sql.Types equivalent - private int mapDatatypeNameToType(String nm) { - switch (nm) { - case "tinyint": return Types.TINYINT; - case "smallint": return Types.SMALLINT; - case "mediumint": - case "int": return Types.INTEGER; - case "bigint": return Types.BIGINT; - case "decimal": - case "numeric": return Types.DECIMAL; - case "float": return Types.FLOAT; - case "double": return Types.DOUBLE; - case "date": - case "datetime": return Types.DATE; - case "time": return Types.TIME; - case "timestamp": return Types.TIMESTAMP; - case "char": return Types.CHAR; - case "text": - case "varchar": return Types.VARCHAR; - case "mediumblob": - case "blob": return Types.VARCHAR; - default: - logger.error(EELFLoggerDelegate.errorLogger,"unrecognized and/or unsupported data type "+nm); - return Types.VARCHAR; - } - } - @Override - public void createSQLTriggers(String tableName) { - // Don't create triggers for the table the triggers write into!!! - if (tableName.equals(TRANS_TBL)) - return; - try { - if (!server_tbl_created) { - try { - Statement stmt = dbConnection.createStatement(); - stmt.execute(CREATE_TBL_SQL); - stmt.close(); - logger.info(EELFLoggerDelegate.applicationLogger,"createSQLTriggers: Server side dirty table created."); - server_tbl_created = true; - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"createSQLTriggers: problem creating the "+TRANS_TBL+" table!"); - } - } - - // Give the triggers a way to find this MSM - for (String name : getTriggerNames(tableName)) { - logger.info(EELFLoggerDelegate.applicationLogger,"ADD trigger "+name+" to msm_map"); - //\TODO fix this is an error - //msm.register(name); - } - // No SELECT trigger - executeSQLWrite(generateTrigger(tableName, "INSERT")); - executeSQLWrite(generateTrigger(tableName, "UPDATE")); - executeSQLWrite(generateTrigger(tableName, "DELETE")); - } catch (SQLException e) { - if (e.getMessage().equals("Trigger already exists")) { - //only warn if trigger already exists - logger.warn(EELFLoggerDelegate.applicationLogger, "createSQLTriggers" + e); - } else { - logger.error(EELFLoggerDelegate.errorLogger,"createSQLTriggers: "+e); - } - } - } -/* -CREATE TRIGGER `triggername` BEFORE UPDATE ON `table` -FOR EACH ROW BEGIN -INSERT INTO `log_table` ( `field1` `field2`, ...) 
VALUES ( NEW.`field1`, NEW.`field2`, ...) ; -END; - -OLD.field refers to the old value -NEW.field refers to the new value -*/ - private String generateTrigger(String tableName, String op) { - boolean isdelete = op.equals("DELETE"); - boolean isinsert = op.equals("INSERT"); - TableInfo ti = getTableInfo(tableName); - StringBuilder newJson = new StringBuilder("JSON_OBJECT("); // JSON_OBJECT(key, val, key, val) page 1766 - StringBuilder keyJson = new StringBuilder("JSON_OBJECT("); - String pfx = ""; - String keypfx = ""; - for (String col : ti.columns) { - newJson.append(pfx) - .append("'").append(col).append("', ") - .append(isdelete ? "OLD." : "NEW.") - .append(col); - if (ti.iskey(col) || !ti.hasKey()) { - keyJson.append(keypfx) - .append("'").append(col).append("', ") - .append(isinsert ? "NEW." : "OLD.") - .append(col); - keypfx = ", "; - } - pfx = ", "; - } - newJson.append(")"); - keyJson.append(")"); - //\TODO check if using mysql driver, so instead check the exception - StringBuilder sb = new StringBuilder() - .append("CREATE TRIGGER ") // IF NOT EXISTS not supported by MySQL! - .append(String.format("%s_%s", op.substring(0, 1), tableName)) - .append(" AFTER ") - .append(op) - .append(" ON ") - .append(tableName) - .append(" FOR EACH ROW INSERT INTO ") - .append(TRANS_TBL) - .append(" (TABLENAME, OP, NEWROWDATA, KEYDATA, CONNECTION_ID) VALUES('") - .append(tableName) - .append("', ") - .append(isdelete ? "'D'" : (op.equals("INSERT") ? "'I'" : "'U'")) - .append(", ") - .append(newJson.toString()) - .append(", ") - .append(keyJson.toString()) - .append(", ") - .append("CONNECTION_ID()") - .append(")"); - return sb.toString(); - } - private String[] getTriggerNames(String tableName) { - return new String[] { - "I_" + tableName, // INSERT trigger - "U_" + tableName, // UPDATE trigger - "D_" + tableName // DELETE trigger - }; - } - - @Override - public void dropSQLTriggers(String tableName) { - try { - for (String name : getTriggerNames(tableName)) { - logger.info(EELFLoggerDelegate.applicationLogger,"REMOVE trigger "+name+" from msmmap"); - executeSQLWrite("DROP TRIGGER IF EXISTS " +name); - //\TODO Fix this is an error - //msm.unregister(name); - } - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"dropSQLTriggers: "+e); - } - } - - @Override - public void insertRowIntoSqlDb(String tableName, Map<String, Object> map) { - TableInfo ti = getTableInfo(tableName); - String sql = ""; - if (rowExists(tableName, ti, map)) { - // Update - Construct the what and where strings for the DB write - StringBuilder what = new StringBuilder(); - StringBuilder where = new StringBuilder(); - String pfx = ""; - String pfx2 = ""; - for (int i = 0; i < ti.columns.size(); i++) { - String col = ti.columns.get(i); - String val = Utils.getStringValue(map.get(col)); - if (ti.iskey.get(i)) { - where.append(pfx).append(col).append("=").append(val); - pfx = " AND "; - } else { - what.append(pfx2).append(col).append("=").append(val); - pfx2 = ", "; - } - } - sql = String.format("UPDATE %s SET %s WHERE %s", tableName, what.toString(), where.toString()); - } else { - // Construct the value string and column name string for the DB write - StringBuilder fields = new StringBuilder(); - StringBuilder values = new StringBuilder(); - String pfx = ""; - for (String col : ti.columns) { - fields.append(pfx).append(col); - values.append(pfx).append(Utils.getStringValue(map.get(col))); - pfx = ", "; - } - sql = String.format("INSERT INTO %s (%s) VALUES (%s);", tableName, fields.toString(), 
values.toString()); - } - try { - executeSQLWrite(sql); - } catch (SQLException e1) { - logger.error(EELFLoggerDelegate.errorLogger,"executeSQLWrite: "+e1); - } - // TODO - remove any entries from MDBC_TRANSLOG corresponding to this update - // SELECT IX, OP, KEYDATA FROM MDBC_TRANS_TBL WHERE CONNID = "+connId AND TABLENAME = tblname - } - - private boolean rowExists(String tableName, TableInfo ti, Map<String, Object> map) { - StringBuilder where = new StringBuilder(); - String pfx = ""; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - String col = ti.columns.get(i); - String val = Utils.getStringValue(map.get(col)); - where.append(pfx).append(col).append("=").append(val); - pfx = " AND "; - } - } - String sql = String.format("SELECT * FROM %s WHERE %s", tableName, where.toString()); - ResultSet rs = executeSQLRead(sql); - try { - boolean rv = rs.next(); - rs.close(); - return rv; - } catch (SQLException e) { - return false; - } - } - - - @Override - public void deleteRowFromSqlDb(String tableName, Map<String, Object> map) { - TableInfo ti = getTableInfo(tableName); - StringBuilder where = new StringBuilder(); - String pfx = ""; - for (int i = 0; i < ti.columns.size(); i++) { - if (ti.iskey.get(i)) { - String col = ti.columns.get(i); - Object val = map.get(col); - where.append(pfx).append(col).append("=").append(Utils.getStringValue(val)); - pfx = " AND "; - } - } - try { - String sql = String.format("DELETE FROM %s WHERE %s", tableName, where.toString()); - executeSQLWrite(sql); - } catch (SQLException e) { - e.printStackTrace(); - } - } - - /** - * This method executes a read query in the SQL database. Methods that call this method should be sure - * to call resultset.getStatement().close() when done in order to free up resources. - * @param sql the query to run - * @return a ResultSet containing the rows returned from the query - */ - @Override - public ResultSet executeSQLRead(String sql) { - logger.debug(EELFLoggerDelegate.applicationLogger,"executeSQLRead"); - logger.debug("Executing SQL read:"+ sql); - ResultSet rs = null; - try { - Statement stmt = dbConnection.createStatement(); - rs = stmt.executeQuery(sql); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"executeSQLRead"+e); - } - return rs; - } - - /** - * This method executes a write query in the sql database. - * @param sql the SQL to be sent to MySQL - * @throws SQLException if an underlying JDBC method throws an exception - */ - protected void executeSQLWrite(String sql) throws SQLException { - logger.debug(EELFLoggerDelegate.applicationLogger, "Executing SQL write:"+ sql); - - Statement stmt = dbConnection.createStatement(); - stmt.execute(sql); - stmt.close(); - } - - /** - * Code to be run within the DB driver before a SQL statement is executed. This is where tables - * can be synchronized before a SELECT, for those databases that do not support SELECT triggers. - * @param sql the SQL statement that is about to be executed - * @return list of keys that will be updated, if they can't be determined afterwards (i.e. 
sql table doesn't have primary key) - */ - @Override - public void preStatementHook(final String sql) { - if (sql == null) { - return; - } - String cmd = sql.trim().toLowerCase(); - if (cmd.startsWith("select")) { - String[] parts = sql.trim().split(" "); - Set<String> set = getSQLTableSet(); - for (String part : parts) { - if (set.contains(part.toUpperCase())) { - // Found a candidate table name in the SELECT SQL -- update this table - //msm.readDirtyRowsAndUpdateDb(part); - } - } - } - } - - /** - * Code to be run within the DB driver after a SQL statement has been executed. This is where remote - * statement actions can be copied back to Cassandra/MUSIC. - * @param sql the SQL statement that was executed - */ - @Override - public void postStatementHook(final String sql,Map<Range,StagingTable> transactionDigest) { - if (sql != null) { - String[] parts = sql.trim().split(" "); - String cmd = parts[0].toLowerCase(); - if ("delete".equals(cmd) || "insert".equals(cmd) || "update".equals(cmd)) { - try { - this.updateStagingTable(transactionDigest); - } catch (NoSuchFieldException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - } - - private OperationType toOpEnum(String operation) throws NoSuchFieldException { - switch (operation.toLowerCase()) { - case "i": - return OperationType.INSERT; - case "d": - return OperationType.DELETE; - case "u": - return OperationType.UPDATE; - case "s": - return OperationType.SELECT; - default: - logger.error(EELFLoggerDelegate.errorLogger,"Invalid operation selected: ["+operation+"]"); - throw new NoSuchFieldException("Invalid operation enum"); - } - - } - /** - * Copy data that is in transaction table into music interface - * @param transactionDigests - * @throws NoSuchFieldException - */ - private void updateStagingTable(Map<Range,StagingTable> transactionDigests) throws NoSuchFieldException { - // copy from DB.MDBC_TRANSLOG where connid == myconnid - // then delete from MDBC_TRANSLOG - String sql2 = "SELECT IX, TABLENAME, OP, KEYDATA, NEWROWDATA FROM "+TRANS_TBL +" WHERE CONNECTION_ID = " + this.connId; - try { - ResultSet rs = executeSQLRead(sql2); - Set<Integer> rows = new TreeSet<Integer>(); - while (rs.next()) { - int ix = rs.getInt("IX"); - String op = rs.getString("OP"); - OperationType opType = toOpEnum(op); - String tbl = rs.getString("TABLENAME"); - String keydataStr = rs.getString("KEYDATA"); - String newRowStr = rs.getString("NEWROWDATA"); - JSONObject newRow = new JSONObject(new JSONTokener(newRowStr)); - String musicKey; - TableInfo ti = getTableInfo(tbl); - if (!ti.hasKey()) { - //create music key - //\TODO fix, this is completely broken - //if (op.startsWith("I")) { - //\TODO Improve the generation of primary key, it should be generated using - // the actual columns, otherwise performance when doing range queries are going - // to be even worse (see the else bracket down) - // - musicKey = msm.generateUniqueKey(); - /*} else { - //get key from data - musicKey = msm.getMusicKeyFromRowWithoutPrimaryIndexes(tbl,newRow); - }*/ - newRow.put(msm.getMusicDefaultPrimaryKeyName(), musicKey); - } - else { - //Use the keys - musicKey = msm.getMusicKeyFromRow(tbl, newRow); - if(musicKey.isEmpty()) { - logger.error(EELFLoggerDelegate.errorLogger,"Primary key is invalid: ["+tbl+","+op+"]"); - throw new NoSuchFieldException("Invalid operation enum"); - } - } - Range range = new Range(tbl); - if(!transactionDigests.containsKey(range)) { - transactionDigests.put(range, new StagingTable()); - } - 
transactionDigests.get(range).addOperation(musicKey, opType, newRow.toString()); - rows.add(ix); - } - rs.getStatement().close(); - if (rows.size() > 0) { - sql2 = "DELETE FROM "+TRANS_TBL+" WHERE IX = ?"; - PreparedStatement ps = dbConnection.prepareStatement(sql2); - logger.debug("Executing: "+sql2); - logger.debug(" For ix = "+rows); - for (int ix : rows) { - ps.setInt(1, ix); - ps.execute(); - } - ps.close(); - } - } catch (SQLException e) { - logger.warn("Exception in postStatementHook: "+e); - e.printStackTrace(); - } - } - - - - /** - * Update music with data from MySQL table - * - * @param tableName - name of table to update in music - */ - @Override - public void synchronizeData(String tableName) { - ResultSet rs = null; - TableInfo ti = getTableInfo(tableName); - String query = "SELECT * FROM "+tableName; - - try { - rs = executeSQLRead(query); - if(rs==null) return; - while(rs.next()) { - - JSONObject jo = new JSONObject(); - if (!getTableInfo(tableName).hasKey()) { - String musicKey = msm.generateUniqueKey(); - jo.put(msm.getMusicDefaultPrimaryKeyName(), musicKey); - } - - for (String col : ti.columns) { - jo.put(col, rs.getString(col)); - } - - @SuppressWarnings("unused") - Object[] row = Utils.jsonToRow(ti,tableName, jo,msm.getMusicDefaultPrimaryKeyName()); - //\FIXME this is wrong now, update of the dirty row and entity is now handled by the archival process - //msm.updateDirtyRowAndEntityTableInMusic(ti,tableName, jo); - } - } catch (Exception e) { - logger.error(EELFLoggerDelegate.errorLogger, "synchronizing data " + tableName + - " -> " + e.getMessage()); - } - finally { - try { - rs.close(); - } catch (SQLException e) { - //continue - } - } - - } - - /** - * Return a list of "reserved" names, that should not be used by MySQL client/MUSIC - * These are reserved for mdbc - */ - @Override - public List<String> getReservedTblNames() { - ArrayList<String> rsvdTables = new ArrayList<String>(); - rsvdTables.add(TRANS_TBL); - //Add others here as necessary - return rsvdTables; - } - @Override - public String getPrimaryKey(String sql, String tableName) { - // - return null; - } - - @SuppressWarnings("unused") - @Deprecated - private ArrayList<String> getMusicKey(String sql) { - try { - net.sf.jsqlparser.statement.Statement stmt = CCJSqlParserUtil.parse(sql); - if (stmt instanceof Insert) { - Insert s = (Insert) stmt; - String tbl = s.getTable().getName(); - return getMusicKey(tbl, "INSERT", sql); - } else if (stmt instanceof Update){ - Update u = (Update) stmt; - String tbl = u.getTables().get(0).getName(); - return getMusicKey(tbl, "UPDATE", sql); - } else if (stmt instanceof Delete) { - Delete d = (Delete) stmt; - //TODO: IMPLEMENT - String tbl = d.getTable().getName(); - return getMusicKey(tbl, "DELETE", sql); - } else { - System.err.println("Not recognized sql type"); - } - - } catch (JSQLParserException e) { - - e.printStackTrace(); - } - //Something went wrong here - return new ArrayList<String>(); - } - - /** - * Returns all keys that matches the current sql statement, and not in already updated keys. 
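The updateStagingTable() method above is where the server-side capture meets the in-memory transaction digest: every row the triggers logged into MDBC_TRANSLOG for this connection is turned into an Operation keyed by its MUSIC primary key and filed under the Range of its table. The condensed sketch below isolates that folding step; it reuses the Range, StagingTable and OperationType classes from this diff, and the musicKey parameter stands in for the key resolution (generated or taken from the row) performed in the original method.

// Sketch: fold one captured row change into the per-range transaction digest,
// mirroring the body of updateStagingTable() above.
void recordCapturedOperation(Map<Range, StagingTable> transactionDigests, String tableName,
                             OperationType opType, String musicKey, JSONObject newRow) {
    Range range = new Range(tableName);                     // one Range per table in this code path
    if (!transactionDigests.containsKey(range)) {
        transactionDigests.put(range, new StagingTable());
    }
    // StagingTable queues operations per key; commitLog() later ships the digest to MUSIC.
    transactionDigests.get(range).addOperation(musicKey, opType, newRow.toString());
}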
- * - * @param tbl - * @param cmd - * @param sql - */ - @Deprecated - private ArrayList<String> getMusicKey(String tbl, String cmd, String sql) { - ArrayList<String> musicKeys = new ArrayList<String>(); - /* - if (cmd.equalsIgnoreCase("insert")) { - //create key, return key - musicKeys.add(msm.generatePrimaryKey()); - } else if (cmd.equalsIgnoreCase("update") || cmd.equalsIgnoreCase("delete")) { - try { - net.sf.jsqlparser.statement.Statement stmt = CCJSqlParserUtil.parse(sql); - String where; - if (stmt instanceof Update) { - where = ((Update) stmt).getWhere().toString(); - } else if (stmt instanceof Delete) { - where = ((Delete) stmt).getWhere().toString(); - } else { - System.err.println("Unknown type: " +stmt.getClass()); - where = ""; - } - ResultSet rs = executeSQLRead("SELECT * FROM " + tbl + " WHERE " + where); - musicKeys = msm.getMusicKeysWhere(tbl, Utils.parseResults(getTableInfo(tbl), rs)); - } catch (JSQLParserException e) { - - e.printStackTrace(); - } catch (SQLException e) { - //Not a valid sql query - e.printStackTrace(); - } - } - */ - return musicKeys; - } - - - @Deprecated - public void insertRowIntoSqlDbOLD(String tableName, Map<String, Object> map) { - // First construct the value string and column name string for the db write - TableInfo ti = getTableInfo(tableName); - StringBuilder fields = new StringBuilder(); - StringBuilder values = new StringBuilder(); - String pfx = ""; - for (String col : ti.columns) { - fields.append(pfx).append(col); - values.append(pfx).append(Utils.getStringValue(map.get(col))); - pfx = ", "; - } - - try { - String sql = String.format("INSERT INTO %s (%s) VALUES (%s);", tableName, fields.toString(), values.toString()); - executeSQLWrite(sql); - } catch (SQLException e) { - logger.error(EELFLoggerDelegate.errorLogger,"Insert failed because row exists, do an update"); - StringBuilder where = new StringBuilder(); - pfx = ""; - String pfx2 = ""; - fields.setLength(0); - for (int i = 0; i < ti.columns.size(); i++) { - String col = ti.columns.get(i); - String val = Utils.getStringValue(map.get(col)); - if (ti.iskey.get(i)) { - where.append(pfx).append(col).append("=").append(val); - pfx = " AND "; - } else { - fields.append(pfx2).append(col).append("=").append(val); - pfx2 = ", "; - } - } - String sql = String.format("UPDATE %s SET %s WHERE %s", tableName, fields.toString(), where.toString()); - try { - executeSQLWrite(sql); - } catch (SQLException e1) { - logger.error(EELFLoggerDelegate.errorLogger,"executeSQLWrite"+e1); - } - } - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/Utils.java b/src/main/java/org/onap/music/mdbc/mixins/Utils.java deleted file mode 100755 index 2fd0f6e..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/Utils.java +++ /dev/null @@ -1,220 +0,0 @@ -package org.onap.music.mdbc.mixins; - -import java.io.IOException; -import java.io.InputStream; -import java.math.BigDecimal; -import java.nio.ByteBuffer; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.sql.Types; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Properties; - -import org.json.JSONObject; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.TableInfo; -import com.datastax.driver.core.utils.Bytes; - -/** - * Utility functions used by several of the mixins should go here. - * - * @author Robert P. 
Eby - */ -public class Utils { - private static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(Utils.class); - - /** - * Transforms and JsonObject into an array of objects - * @param ti information related to the table - * @param tbl table that jo belong to - * @param jo object that represents a row in the table - * @param musicDefaultPrimaryKeyName contains the name of key associated with the default primary key used by MUSIC, it can be null, if not requird - * @return array with the objects in the row - */ - public static Object[] jsonToRow(TableInfo ti, String tbl, JSONObject jo, String musicDefaultPrimaryKeyName) { - int columnSize = ti.columns.size(); - ArrayList<Object> rv = new ArrayList<Object>(); - if (musicDefaultPrimaryKeyName!=null && jo.has(musicDefaultPrimaryKeyName)) { - rv.add(jo.getString(musicDefaultPrimaryKeyName)); - } - for (int i = 0; i < columnSize; i++) { - String colname = ti.columns.get(i); - switch (ti.coltype.get(i)) { - case Types.BIGINT: - rv.add(jo.optLong(colname, 0)); - break; - case Types.BOOLEAN: - rv.add(jo.optBoolean(colname, false)); - break; - case Types.BLOB: - rv.add(jo.optString(colname, "")); - break; - case Types.DECIMAL: - rv.add(jo.optBigDecimal(colname, BigDecimal.ZERO)); - break; - case Types.DOUBLE: - rv.add(jo.optDouble(colname, 0)); - break; - case Types.INTEGER: - rv.add(jo.optInt(colname, 0)); - break; - case Types.TIMESTAMP: - //rv[i] = new Date(jo.optString(colname, "")); - rv.add(jo.optString(colname, "")); - break; - case Types.DATE: - case Types.VARCHAR: - //Fall through - default: - rv.add(jo.optString(colname, "")); - break; - } - } - return rv.toArray(); - } - - /** - * Return a String equivalent of an Object. Useful for writing SQL. - * @param val the object to String-ify - * @return the String value - */ - public static String getStringValue(Object val) { - if (val == null) - return "NULL"; - if (val instanceof String) - return "'" + val.toString().replaceAll("'", "''") + "'"; // double any quotes - if (val instanceof Number) - return ""+val; - if (val instanceof ByteBuffer) - return "'" + Bytes.toHexString((ByteBuffer)val).substring(2) + "'"; // substring(2) is to remove the "0x" at front - if (val instanceof Date) - return "'" + (new Timestamp(((Date)val).getTime())).toString() + "'"; - // Boolean, and anything else - return val.toString(); - } - - /** - * Parse result set and put into object array - * @param tbl - * @param rs - * @return - * @throws SQLException - */ - public static ArrayList<Object[]> parseResults(TableInfo ti, ResultSet rs) throws SQLException { - ArrayList<Object[]> results = new ArrayList<Object[]>(); - while (rs.next()) { - Object[] row = new Object[ti.columns.size()]; - for (int i = 0; i < ti.columns.size(); i++) { - String colname = ti.columns.get(i); - switch (ti.coltype.get(i)) { - case Types.BIGINT: - row[i] = rs.getLong(colname); - break; - case Types.BOOLEAN: - row[i] = rs.getBoolean(colname); - break; - case Types.BLOB: - System.err.println("WE DO NOT SUPPORT BLOBS IN H2!! COLUMN NAME="+colname); - //logger.error("WE DO NOT SUPPORT BLOBS IN H2!! COLUMN NAME="+colname); - // throw an exception here??? 
- break; - case Types.DOUBLE: - row[i] = rs.getDouble(colname); - break; - case Types.INTEGER: - row[i] = rs.getInt(colname); - break; - case Types.TIMESTAMP: - //rv[i] = new Date(jo.optString(colname, "")); - row[i] = rs.getString(colname); - break; - case Types.VARCHAR: - //Fall through - default: - row[i] = rs.getString(colname); - break; - } - } - results.add(row); - } - return results; - } - - @SuppressWarnings("unused") - static List<Class<?>> getClassesImplementing(Class<?> implx) { - Properties pr = null; - try { - pr = new Properties(); - pr.load(Utils.class.getResourceAsStream("/mdbc_driver.properties")); - } - catch (IOException e) { - logger.error(EELFLoggerDelegate.errorLogger, "Could not load property file > " + e.getMessage()); - } - - List<Class<?>> list = new ArrayList<Class<?>>(); - if (pr==null) { - return list; - } - String mixins = pr.getProperty("MIXINS"); - for (String className: mixins.split("[ ,]")) { - try { - Class<?> cl = Class.forName(className.trim()); - if (MixinFactory.impl(cl, implx)) { - list.add(cl); - } - } catch (ClassNotFoundException e) { - logger.error(EELFLoggerDelegate.errorLogger,"Mixin class "+className+" not found."); - } - } - return list; - } - - public static void registerDefaultDrivers() { - Properties pr = null; - try { - pr = new Properties(); - pr.load(Utils.class.getResourceAsStream("/mdbc_driver.properties")); - } - catch (IOException e) { - logger.error("Could not load property file > " + e.getMessage()); - } - - @SuppressWarnings("unused") - List<Class<?>> list = new ArrayList<Class<?>>(); - String drivers = pr.getProperty("DEFAULT_DRIVERS"); - for (String driver: drivers.split("[ ,]")) { - logger.info(EELFLoggerDelegate.applicationLogger, "Registering jdbc driver '" + driver + "'"); - try { - @SuppressWarnings("unused") - Class<?> cl = Class.forName(driver.trim()); - } catch (ClassNotFoundException e) { - logger.error(EELFLoggerDelegate.errorLogger,"Driver class "+driver+" not found."); - } - } - } - - public static Properties getMdbcProperties() { - Properties prop = new Properties(); - InputStream input = null; - try { - input = Utils.class.getClassLoader().getResourceAsStream("/mdbc.properties"); - prop.load(input); - } catch (Exception e) { - logger.warn(EELFLoggerDelegate.applicationLogger, "Could load mdbc.properties." - + "Proceeding with defaults " + e.getMessage()); - } finally { - if (input != null) { - try { - input.close(); - } catch (IOException e) { - logger.error(EELFLoggerDelegate.errorLogger, e.getMessage()); - } - } - } - return prop; - } -} diff --git a/src/main/java/org/onap/music/mdbc/mixins/package-info.java b/src/main/java/org/onap/music/mdbc/mixins/package-info.java deleted file mode 100755 index 703a119..0000000 --- a/src/main/java/org/onap/music/mdbc/mixins/package-info.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * <p> - * This package provides the "mixins" to use when constructing a MusicSqlManager. The mixins define how MusicSqlManager - * will interface both to the database being mirrored (via the {@link org.onap.music.mdbc.mixins.DBInterface} interface), - * and how it will interface to the persistence layer provided by MUSIC (via the {@link org.onap.music.mdbc.mixins.MusicInterface} - * interface). - * </p> - * <p> - * The choice of which mixins to use is determined by the MusicSqlManager constructor. - * It will decide based upon the URL and connection properties with which it is presented (from the - * {@link java.sql.DriverManager#getConnection(String, java.util.Properties)} call). 
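As the package documentation that begins above explains, the choice of database and MUSIC mixin is made at connection time from the URL and connection properties, and the candidate classes are listed under the MIXINS key of mdbc_driver.properties (see Utils.getClassesImplementing() earlier in this diff). The sketch below is a stripped-down version of that lookup; the property file name and key come from the deleted Utils class, while the filtering step uses a plain isAssignableFrom check as a stand-in for the project's MixinFactory.impl() helper.

// Sketch of the property-driven mixin lookup, assuming a "MIXINS" entry that lists
// fully qualified class names separated by spaces or commas (as in Utils above).
static List<Class<?>> findMixinsImplementing(Class<?> wanted) throws IOException {
    Properties pr = new Properties();
    try (InputStream in = Utils.class.getResourceAsStream("/mdbc_driver.properties")) {
        if (in != null) {
            pr.load(in);
        }
    }
    List<Class<?>> result = new ArrayList<>();
    for (String name : pr.getProperty("MIXINS", "").split("[ ,]")) {
        if (name.trim().isEmpty()) {
            continue;
        }
        try {
            Class<?> cl = Class.forName(name.trim());
            if (wanted.isAssignableFrom(cl)) {   // simplified stand-in for MixinFactory.impl(cl, wanted)
                result.add(cl);
            }
        } catch (ClassNotFoundException e) {
            // unknown entries are skipped, as the original logs and continues
        }
    }
    return result;
}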
- * </p> - * <p> - * The list of mixins that may be selected from is stored in the properties files <code>mdbc.properties</code> - * under the name MIXINS. This implementation provides the following mixins: - * </p> - * <table summary=""> - * <tr><th>Name</th><th>Class</th><th>Description</th></tr> - * <tr><td>cassandra</td><td>c.a.r.m.m.CassandraMixin</td><td>A <a href="http://cassandra.apache.org/">Cassandra</a> based - * persistence layer (without any of the table locking that MUSIC normally provides).</td></tr> - * <tr><td>cassandra2</td><td>c.a.r.m.m.Cassandra2Mixin</td><td>Similar to the <i>cassandra</i> mixin, but stores all - * dirty row information in one table, rather than one table per real table.</td></tr> - * <tr><td>h2</td><td>c.a.r.m.m.H2Mixin</td><td>This mixin provides access to either an in-memory, or a local - * (file-based) version of the H2 database.</td></tr> - * <tr><td>h2server</td><td>c.a.r.m.m.H2ServerMixin</td><td>This mixin provides access to a copy of the H2 database - * running as a server. Because the server needs special Java classes in order to handle certain TRIGGER actions, the - * server must be et up in a special way (see below).</td></tr> - * <tr><td>mysql</td><td>c.a.r.m.m.MySQLMixin</td><td>This mixin provides access to MySQL running on a remote server.</td></tr> - * </table> - * <h2>Starting the H2 Server</h2> - * <p> - * The H2 Server, when used with MDBC, must contain the MDBC Trigger class, and supporting libraries. - * This can be done as follows: - * </p> - * <pre> - * CLASSPATH=$PWD/target/mdbc-h2server-0.0.1-SNAPSHOT.jar - * CLASSPATH=$CLASSPATH:$HOME/.m2/repository/com/h2database/h2/1.3.168/h2-1.3.168.jar - * CLASSPATH=$CLASSPATH:$HOME/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar - * CLASSPATH=$CLASSPATH:$HOME/.m2/repository/org/json/json/20160810/json-20160810.jar - * export CLASSPATH - * java org.h2.tools.Server - * </pre> - * <p> - * The <code>mdbc-h2server-0.0.1-SNAPSHOT.jar</code> file is built with Maven using the <code>pom-h2server.xml</code> pom file. - * </p> - */ -package org.onap.music.mdbc.mixins; diff --git a/src/main/java/org/onap/music/mdbc/package-info.java b/src/main/java/org/onap/music/mdbc/package-info.java deleted file mode 100755 index 576ab88..0000000 --- a/src/main/java/org/onap/music/mdbc/package-info.java +++ /dev/null @@ -1,87 +0,0 @@ -/** - * <p> - * This package provides a JDBC driver that can be used to mirror the contents of a database to and from - * <a href="http://cassandra.apache.org/">Cassandra</a>. The mirroring occurs as a side effect of - * execute() statements against a JDBC connection, and triggers placed in the database to catch database modifications. - * The initial implementation is written to mirror an <a href="http://h2database.com/">H2</a> database. - * </p> - * <p> - * This JDBC driver will intercept all table creations, SELECTs, INSERTs, DELETEs, and UPDATEs made to the underlying - * database, and make sure they are copied to Cassandra. In addition, for every table XX that is created, another table - * DIRTY_XX will be created to communicate the existence of <i>dirty rows</i> to other Cassandra replicas (with the - * Cassandra2 Mixin, the table is called DIRTY____ and there is only one table). Dirty rows - * will be copied, as needed back into the database from Cassandra before any SELECT. 
- * </p> - * <h3>To use with JDBC</h3> - * <ol> - * <li>Add this jar, and all dependent jars to your CLASSPATH.</li> - * <li>Rewrite your JDBC URLs from <code>jdbc:h2:...</code> to <code>jdbc:mdbc:...</code>. - * <li>If you supply properties to the {@link java.sql.DriverManager#getConnection(String, java.util.Properties)} call, - * use the following optional properties to control behavior of the proxy: - * <table summary=""> - * <tr><th>Property Name</th><th>Property Value</th><th>Default Value</th></tr> - * <tr><td>MDBC_DB_MIXIN</td><td>The mixin name to use to select the database mixin to use for this connection.</td></tr> - * <tr><td>MDBC_MUSIC_MIXIN</td><td>The mixin name to use to select the MUSIC mixin to use for this connection.</td></tr> - * <tr><td>myid</td><td>The ID of this replica in the collection of replicas sharing the same tables.</td><td>0</td></tr> - * <tr><td>replicas</td><td>A comma-separated list of replica names for the collection of replicas sharing the same tables.</td><td>the value of <i>myid</i></td></tr> - * <tr><td>music_keyspace</td><td>The keyspace name to use in Cassandra for all tables created by this instance of MDBC.</td><td>mdbc</td></tr> - * <tr><td>music_address</td><td>The IP address to use to connect to Cassandra.</td><td>localhost</td></tr> - * <tr><td>music_rfactor</td><td>The replication factor to use for the new keyspace that is created.</td><td>2</td></tr> - * <tr><td>disabled</td><td>If set to <i>true</i> the mirroring is completely disabled; this is the equivalent of using the database driver directly.</td><td>false</td></tr> - * </table> - * </li> - * <li>Load the driver using the following call: - * <pre> - * Class.forName("org.onap.music.mdbc.ProxyDriver"); - * </pre></li> - * </ol> - * <p>Because, under the current design, the MDBC driver must be running within the same JVM as the database, MDBC - * will only explicitly support in-memory databases (URL of <code>jdbc:mdbc:mem:...</code>), or local file - * databases (URL of <code>jdbc:mdbc:/path/to/file</code>). Attempts to access a remote H2 server (URL - * <code>jdbc:mdbc:tcp://host/path/to/db</code>) will probably not work, although MDBC will not stop you from trying. - * </p> - * - * <h3>To Define a Tomcat DataSource Resource</h3> - * <p>The following code snippet can be used as a guide when setting up a Tomcat DataSource Resource. - * This snippet goes in the <i>server.xml</i> file. The items in <b>bold</b> indicate changed or new items:</p> - * <pre> - * <Resource name="jdbc/ProcessEngine" - * auth="Container" - * type="javax.sql.DataSource" - * factory="org.apache.tomcat.jdbc.pool.DataSourceFactory" - * uniqueResourceName="process-engine" - * driverClassName="<b>org.onap.music.mdbc.ProxyDriver</b>" - * url="jdbc:<b>mdbc</b>:./camunda-h2-dbs/process-engine;MVCC=TRUE;TRACE_LEVEL_FILE=0;DB_CLOSE_ON_EXIT=FALSE" - * <b>connectionProperties="myid=0;replicas=0,1,2;music_keyspace=camunda;music_address=localhost"</b> - * username="sa" - * password="sa" - * maxActive="20" - * minIdle="5" /> - * </pre> - * - * <h3>To Define a JBoss DataSource</h3> - * <p>The following code snippet can be used as a guide when setting up a JBoss DataSource. - * This snippet goes in the <i>service.xml</i> file. 
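The usage notes above give everything needed to reach MDBC from plain JDBC: load org.onap.music.mdbc.ProxyDriver, rewrite the URL scheme to jdbc:mdbc:, and pass the replica and MUSIC settings as connection properties. A minimal, self-contained example of that handshake follows; the property values (mixin name, replica ids, keyspace, Cassandra address, credentials) are placeholders assembled from the tables above and from the deleted ConnectionTest, not a verified configuration.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class MdbcConnectExample {
    public static void main(String[] args) throws Exception {
        Class.forName("org.onap.music.mdbc.ProxyDriver");   // register the MDBC proxy driver

        Properties props = new Properties();
        props.put("MDBC_DB_MIXIN", "mysql");                 // database mixin to mirror
        props.put("myid", "0");                              // id of this replica
        props.put("replicas", "0,1,2");                      // replicas sharing the same tables
        props.put("music_keyspace", "mdbc");                 // Cassandra keyspace used by MDBC
        props.put("music_address", "localhost");             // Cassandra contact point
        props.put("user", "sa");
        props.put("password", "sa");

        // The URL scheme is jdbc:mdbc:...; the remainder is handed to the selected mixin.
        try (Connection conn = DriverManager.getConnection("jdbc:mdbc://localhost/TEST", props);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}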
The items in <b>bold</b> indicate changed or new items:</p> - * <pre> - * <datasources> - * <datasource jta="true" jndi-name="java:jboss/datasources/ProcessEngine" pool-name="ProcessEngine" enabled="true" use-java-context="true" use-ccm="true"> - * <connection-url>jdbc:<b>mdbc</b>:/opt/jboss-eap-6.2.4/standalone/camunda-h2-dbs/process-engine;DB_CLOSE_DELAY=-1;MVCC=TRUE;DB_CLOSE_ON_EXIT=FALSE</connection-url> - * <b><connection-property name="music_keyspace"> - * camunda - * </connection-property></b> - * <driver>mdbc</driver> - * <security> - * <user-name>sa</user-name> - * <password>sa</password> - * </security> - * </datasource> - * <drivers> - * <b><driver name="mdbc" module="org.onap.music.mdbc"> - * <driver-class>org.onap.music.mdbc.ProxyDriver</driver-class> - * </driver></b> - * </drivers> - * </datasources> - * </pre> - * <p>Note: This assumes that you have built and installed the <b>org.onap.music.mdbc</b> module within JBoss. - */ -package org.onap.music.mdbc; diff --git a/src/main/java/org/onap/music/mdbc/tables/MriReference.java b/src/main/java/org/onap/music/mdbc/tables/MriReference.java deleted file mode 100644 index 29de8d0..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/MriReference.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.util.UUID; - -public final class MriReference { - public final String table; - public final UUID index; - - public MriReference(String table, UUID index) { - this.table = table; - this.index= index; - } - -} diff --git a/src/main/java/org/onap/music/mdbc/tables/MusicRangeInformationRow.java b/src/main/java/org/onap/music/mdbc/tables/MusicRangeInformationRow.java deleted file mode 100644 index 8a1d2e8..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/MusicRangeInformationRow.java +++ /dev/null @@ -1,22 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.util.List; -import java.util.UUID; - -public final class MusicRangeInformationRow { - public final UUID index; - public final PartitionInformation partition; - public final List<MusicTxDigestId> redoLog; - public final String ownerId; - public final String metricProcessId; - - public MusicRangeInformationRow(UUID index, List<MusicTxDigestId> redoLog, PartitionInformation partition, - String ownerId, String metricProcessId) { - this.index = index; - this.redoLog = redoLog; - this.partition = partition; - this.ownerId = ownerId; - this.metricProcessId = metricProcessId; - } - -} diff --git a/src/main/java/org/onap/music/mdbc/tables/MusicTxDigestId.java b/src/main/java/org/onap/music/mdbc/tables/MusicTxDigestId.java deleted file mode 100644 index 5b8fadd..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/MusicTxDigestId.java +++ /dev/null @@ -1,15 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.util.UUID; - -public final class MusicTxDigestId { - public final UUID tablePrimaryKey; - - public MusicTxDigestId(UUID primaryKey) { - this.tablePrimaryKey= primaryKey; - } - - public boolean isEmpty() { - return (this.tablePrimaryKey==null); - } -} diff --git a/src/main/java/org/onap/music/mdbc/tables/Operation.java b/src/main/java/org/onap/music/mdbc/tables/Operation.java deleted file mode 100644 index d3aabe0..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/Operation.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.io.Serializable; - -import org.json.JSONObject; -import org.json.JSONTokener; - -public final class Operation implements Serializable{ - - private static final long 
serialVersionUID = -1215301985078183104L; - - final OperationType TYPE; - final String NEW_VAL; - - public Operation(OperationType type, String newVal) { - TYPE = type; - NEW_VAL = newVal; - } - - public JSONObject getNewVal(){ - JSONObject newRow = new JSONObject(new JSONTokener(NEW_VAL)); - return newRow; - } - - public OperationType getOperationType() { - return this.TYPE; - } - - @Override - public boolean equals(Object o){ - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Operation r = (Operation) o; - return TYPE.equals(r.TYPE) && NEW_VAL.equals(r.NEW_VAL); - } -} diff --git a/src/main/java/org/onap/music/mdbc/tables/OperationType.java b/src/main/java/org/onap/music/mdbc/tables/OperationType.java deleted file mode 100644 index 1ccd919..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/OperationType.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.onap.music.mdbc.tables; - -public enum OperationType{ - DELETE, UPDATE, INSERT, SELECT -} diff --git a/src/main/java/org/onap/music/mdbc/tables/PartitionInformation.java b/src/main/java/org/onap/music/mdbc/tables/PartitionInformation.java deleted file mode 100644 index 2f048b7..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/PartitionInformation.java +++ /dev/null @@ -1,13 +0,0 @@ -package org.onap.music.mdbc.tables; - -import org.onap.music.mdbc.Range; - -import java.util.List; - -public class PartitionInformation { - public final List<Range> ranges; - - public PartitionInformation(List<Range> ranges) { - this.ranges=ranges; - } -} diff --git a/src/main/java/org/onap/music/mdbc/tables/StagingTable.java b/src/main/java/org/onap/music/mdbc/tables/StagingTable.java deleted file mode 100644 index 6e93856..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/StagingTable.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.io.Serializable; -import java.util.Deque; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.Set; -import org.apache.commons.lang3.tuple.Pair; -import org.json.JSONObject; - -import org.onap.music.logging.EELFLoggerDelegate; - -public class StagingTable implements Serializable{ - /** - * - */ - private static final long serialVersionUID = 7583182634761771943L; - private transient static EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(StagingTable.class); - //primary key -> Operation - private HashMap<String,Deque<Operation>> operations; - - public StagingTable() { - operations = new HashMap<>(); - } - - synchronized public void addOperation(String key, OperationType type, String newVal) { - if(!operations.containsKey(key)) { - operations.put(key, new LinkedList<>()); - } - operations.get(key).add(new Operation(type,newVal)); - } - - synchronized public Deque<Pair<String,Operation>> getIterableSnapshot() throws NoSuchFieldException{ - Deque<Pair<String,Operation>> response=new LinkedList<Pair<String,Operation>>(); - //\TODO: check if we can just return the last change to a given key - Set<String> keys = operations.keySet(); - for(String key : keys) { - Deque<Operation> ops = operations.get(key); - if(ops.isEmpty()) { - logger.error(EELFLoggerDelegate.errorLogger, "Invalid state of the Operation data structure when creating snapshot"); - throw new NoSuchFieldException("Invalid state of the operation data structure"); - } - response.add(Pair.of(key,ops.getLast())); - } - return response; - } - - synchronized public void clean() { - operations.clear(); - } -} diff --git 
a/src/main/java/org/onap/music/mdbc/tables/TxCommitProgress.java b/src/main/java/org/onap/music/mdbc/tables/TxCommitProgress.java deleted file mode 100644 index 02942c6..0000000 --- a/src/main/java/org/onap/music/mdbc/tables/TxCommitProgress.java +++ /dev/null @@ -1,204 +0,0 @@ -package org.onap.music.mdbc.tables; - -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import com.datastax.driver.core.utils.UUIDs; - -import org.onap.music.logging.EELFLoggerDelegate; - -import java.sql.Connection; - - -public class TxCommitProgress{ - private EELFLoggerDelegate logger = EELFLoggerDelegate.getLogger(TxCommitProgress.class); - - private Map<String, CommitProgress> transactionInfo; - - public TxCommitProgress(){ - transactionInfo = new ConcurrentHashMap<>(); - } - - public boolean containsTx(String txId) { - return transactionInfo.containsKey(txId); - } - - public UUID getCommitId(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog.isCommitIdAssigned()) { - return prog.getCommitId(); - } - UUID commitId = UUIDs.random(); - prog.setCommitId(commitId); - return commitId; - } - - public void createNewTransactionTracker(String id, Connection conn) { - transactionInfo.put(id, new CommitProgress(id,conn)); - } - - public void commitRequested(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when storing commit request",txId); - } - prog.setCommitRequested(); - } - - public void setSQLDone(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when storing saving completion of SQL",txId); - } - prog.setSQLCompleted(); - } - - public void setMusicDone(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when storing saving completion of Music",txId); - } - prog.setMusicCompleted(); - } - - public Connection getConnection(String txId){ - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when retrieving statement",txId); - } - return prog.getConnection(); - } - - public void setRecordId(String txId, MusicTxDigestId recordId){ - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when setting record Id",txId); - } - prog.setRecordId(recordId); - } - - public MusicTxDigestId getRecordId(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when getting record Id",txId); - } - return prog.getRecordId(); - } - - public boolean isRecordIdAssigned(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when checking record",txId); - } - return prog.isRedoRecordAssigned(); - } - - public boolean isComplete(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when checking completion",txId); - } - return prog.isComplete(); 
- } - - public void reinitializeTxProgress(String txId) { - CommitProgress prog = transactionInfo.get(txId); - if(prog == null){ - logger.error(EELFLoggerDelegate.errorLogger, "Transaction doesn't exist: [%l], failure when reinitializing tx progress",txId); - } - prog.reinitialize(); - } - - public void deleteTxProgress(String txId){ - transactionInfo.remove(txId); - } -} - -final class CommitProgress{ - private String lTxId; // local transaction id - private UUID commitId; // commit id - private boolean commitRequested; //indicates if the user tried to commit the request already. - private boolean SQLDone; // indicates if SQL was already committed - private boolean MusicDone; // indicates if music commit was already performed, atomic bool - private Connection connection;// reference to a connection object. This is used to complete a commit if it failed in the original thread. - private Long timestamp; // last time this data structure was updated - private MusicTxDigestId musicTxDigestId;// record id for each partition - - public CommitProgress(String id,Connection conn){ - musicTxDigestId =null; - lTxId = id; - commitRequested = false; - SQLDone = false; - MusicDone = false; - connection = conn; - commitId = null; - timestamp = System.currentTimeMillis(); - } - - public synchronized boolean isComplete() { - return commitRequested && SQLDone && MusicDone; - } - - public synchronized void setCommitId(UUID commitId) { - this.commitId = commitId; - timestamp = System.currentTimeMillis(); - } - - public synchronized void reinitialize() { - commitId = null; - musicTxDigestId =null; - commitRequested = false; - SQLDone = false; - MusicDone = false; - timestamp = System.currentTimeMillis(); - } - - public synchronized void setCommitRequested() { - commitRequested = true; - timestamp = System.currentTimeMillis(); - } - - public synchronized void setSQLCompleted() { - SQLDone = true; - timestamp = System.currentTimeMillis(); - } - - public synchronized void setMusicCompleted() { - MusicDone = true; - timestamp = System.currentTimeMillis(); - } - - public Connection getConnection() { - timestamp = System.currentTimeMillis(); - return connection; - } - - public long getTimestamInMillis() { - return timestamp; - } - - public synchronized void setRecordId(MusicTxDigestId id) { - musicTxDigestId = id; - timestamp = System.currentTimeMillis(); - } - - public synchronized boolean isRedoRecordAssigned() { - return this.musicTxDigestId !=null; - } - - public synchronized MusicTxDigestId getRecordId() { - return musicTxDigestId; - } - - public synchronized UUID getCommitId() { - return commitId; - } - - public synchronized String getId() { - return this.lTxId; - } - - public synchronized boolean isCommitIdAssigned() { - return this.commitId!= null; - } -}
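TxCommitProgress and its per-transaction CommitProgress entries, deleted above, are the bookkeeping that lets MDBC tell whether a local transaction has made it through both the SQL commit and the MUSIC commit. The fragment below walks one transaction through that life cycle; it is a usage sketch against the API shown in this diff, and the txId value and Connection are assumed to come from the driver.

// Sketch: tracking one transaction with TxCommitProgress (API as deleted above).
void trackCommit(TxCommitProgress progress, Connection conn) {
    String txId = "tx-42";                          // assumed local transaction id
    progress.createNewTransactionTracker(txId, conn);

    progress.commitRequested(txId);                 // the user asked to commit
    progress.setSQLDone(txId);                      // local SQL commit finished
    progress.setMusicDone(txId);                    // MUSIC/redo-log commit finished

    if (progress.isComplete(txId)) {                // true only once all three flags are set
        progress.deleteTxProgress(txId);            // drop the tracker for the finished transaction
    }
}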
\ No newline at end of file diff --git a/src/main/java/org/onap/music/mdbc/tests/ConnectionTest.java b/src/main/java/org/onap/music/mdbc/tests/ConnectionTest.java deleted file mode 100644 index 5a0d98c..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/ConnectionTest.java +++ /dev/null @@ -1,419 +0,0 @@ -package org.onap.music.mdbc.tests; - -//import java.sql.Connection; -//import java.sql.DriverManager; -//import java.sql.PreparedStatement; -//import java.sql.ResultSet; -//import java.sql.SQLException; -//import java.sql.Statement; -//import java.util.HashSet; -//import java.util.Properties; -//import java.util.Set; -// -//import org.h2.tools.Server; -//import org.junit.After; -//import org.junit.AfterClass; -//import org.junit.Before; -//import org.junit.BeforeClass; -//import org.junit.Test; -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; -// -//import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; - - -//@FixMethodOrder(MethodSorters.NAME_ASCENDING) -//@RunWith(ConcurrentTestRunner.class) -public class ConnectionTest { -// -//// static { -//// System.setProperty(org.slf4j.impl.SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "INFO"); -//// System.setProperty(org.slf4j.impl.SimpleLogger.LOG_FILE_KEY, String.format("ComparativeAnalysisTest-%d.log", System.currentTimeMillis())); -//// } -// private static final Logger LOG = LoggerFactory.getLogger(ConnectionTest.class); -// -// Set<Thread> runningThreads = new HashSet<Thread>(); -// -// @BeforeClass -// public static void setUpBeforeClass() throws Exception { -// -// } -// -// @AfterClass -// public static void tearDownAfterClass() throws Exception { -// -// } -// -// @Before -// public void setUp() throws Exception { -// -// } -// -// @After -// public void tearDown() throws Exception { -// -// } -// -// //@Test -// public void test01() { -// System.out.println("TEST 1: Getting ready for testing connection to Cassandra"); -// -// final CassandraConnector client = new CassandraConnector(); -// final String ipAddress = "localhost"; -// final int port = 9042; -// LOG.info("Connecting to IP Address " + ipAddress + ":" + port + "..."); -// client.connect(ipAddress, port); -// client.close(); -// System.out.println(); -// } -// -// /** -// * Tests for using jdbc as well as mdbc. In order to use, must have mysql and -// * running locally. Must have a database EMP created in the -// * mysql db. Uses "Driver.getConnection(com.mysql.jdbc.Driver)" for jdbc connection -// * -// */ -// //@Test -// public void test02() { -// System.out.println("TEST 2: Getting ready for testing connection via jdbc"); -// // JDBC driver name and database URL -// final String JDBC_DRIVER = "com.mysql.jdbc.Driver"; -// final String DB_URL = "jdbc:mysql://localhost/EMP"; -// -// // Database credentials -// final String USER = "alice"; -// final String PASS = "bob"; -// Properties connectionProps = new Properties(); -// connectionProps.put("user", USER); -// connectionProps.put("password", PASS); -// -// System.out.println("Connecting directly to database..."); -// connectViaDriverManager(JDBC_DRIVER, DB_URL, connectionProps); -// System.out.println(); -// } -// -// /** -// * Performs same test as @test02() except this test uses mdbc. -// * -// * In order to use, must have mysql and Cassandra services running locally. Must -// * have a database EMP created in the mysql db. 
Uses -// * "Driver.getConnection(org.onap.music.mdbc.ProxyDriver)" for mdbc -// * connection -// */ -// //@Test -// public void test03() { -// System.out.println("TEST 3: Getting ready for testing connection via mdbc"); -// // Database credentials -// final String USER = "alice"; -// final String PASS = "bob"; -// Properties connectionProps = new Properties(); -// connectionProps.put("user", USER); -// connectionProps.put("password", PASS); -// -// final String MDBC_DRIVER = "org.onap.music.mdbc.ProxyDriver"; -// final String MDBC_DB_URL = "jdbc:mdbc://localhost/TEST"; -// final String MDBC_DB_MIXIN = "mysql"; -// connectionProps.put("MDBC_DB_MIXIN", MDBC_DB_MIXIN); -// -// System.out.println("Connecting to database via mdbc"); -// connectViaDriverManager(MDBC_DRIVER, MDBC_DB_URL, connectionProps); -// System.out.println(); -// } -// -// /** -// * Performs same test as @test02() except this test uses mdbc. -// * -// * In order to use, must have mysql and Cassandra services running locally. Must -// * have a database EMP created in the mysql db. Uses -// * "Driver.getConnection(org.onap.music.mdbc.ProxyDriver)" for mdbc -// * connection -// * -// * Uses preparedStatements -// */ -// //@Test -// public void test03point5() { -// System.out.println("TEST 3.5: Getting ready for testing connection via mdbc w/ PreparedStatement"); -// // Database credentials -// final String USER = "alice"; -// final String PASS = "bob"; -// Properties connectionProps = new Properties(); -// connectionProps.put("user", USER); -// connectionProps.put("password", PASS); -// -// final String MDBC_DRIVER = "org.onap.music.mdbc.ProxyDriver"; -// final String MDBC_DB_URL = "jdbc:mdbc://localhost/EMP"; -// //final String MDBC_DRIVER = "org.h2.Driver"; -// //final String MDBC_DB_URL = "jdbc:h2:tcp://localhost:9092/~/test"; -// final String MDBC_DB_MIXIN = "mysql"; -// connectionProps.put("MDBC_DB_MIXIN", MDBC_DB_MIXIN); -// -// System.out.println("Connecting to database via mdbc"); -// Connection conn = null; -// PreparedStatement stmt = null; -// try { -// //STEP 2: Register JDBC driver -// Class.forName(MDBC_DRIVER); -// -// //STEP 3: Open a connection -// conn = DriverManager.getConnection(MDBC_DB_URL, connectionProps); -// conn.setAutoCommit(false); -// -// //STEP 4: Execute a query -// System.out.println("Inserting into DB"); -// stmt = conn.prepareStatement("INSERT INTO EMPLOYEE (id, first, last, age) VALUES (?, ?, ?, ?)"); -// stmt.setString(1, null); -// stmt.setString(2, "John"); -// stmt.setString(3, "Smith"); -// stmt.setInt(4, 20); -// stmt.execute(); -// -// System.out.println("Inserting again into DB"); -// stmt.setString(2, "Jane"); -// stmt.setInt(4, 30); -// stmt.execute(); -// -// stmt.close(); -// -// conn.commit(); -// -// System.out.println("Querying the DB"); -// stmt = conn.prepareStatement("SELECT id, first, last, age FROM EMPLOYEE WHERE age < ?"); -// stmt.setInt(1, 25); -// ResultSet rs = stmt.executeQuery(); -// //STEP 5: Extract data from result set -// while(rs.next()) { -// //Retrieve by column name -// int id = rs.getInt("id"); -// int age = rs.getInt("age"); -// String first = rs.getString("first"); -// String last = rs.getString("last"); -// -// //Display values -// //* -// System.out.print("ID: " + id); -// System.out.print(", Age: " + age); -// System.out.print(", First: " + first); -// System.out.println(", Last: " + last); -// //*/ -// } -// -// System.out.println("Querying again"); -// stmt.setInt(1, 35); -// rs = stmt.executeQuery(); -// //STEP 5: Extract data from result set -// 
while(rs.next()) { -// //Retrieve by column name -// int id = rs.getInt("id"); -// int age = rs.getInt("age"); -// String first = rs.getString("first"); -// String last = rs.getString("last"); -// -// //Display values -// //* -// System.out.print("ID: " + id); -// System.out.print(", Age: " + age); -// System.out.print(", First: " + first); -// System.out.println(", Last: " + last); -// //*/ -// } -// -// -// //sql = "DELETE FROM EMPLOYEE WHERE first = \"John\" and last = \"Smith\""; -// //stmt.execute(sql); -// -// //sql = "DROP TABLE IF EXISTS EMPLOYEE"; -// //stmt.execute(sql); -// -// //STEP 6: Clean-up environment -// rs.close(); -// stmt.close(); -// conn.close(); -// } catch(SQLException se) { -// //Handle errors for JDBC -// se.printStackTrace(); -// } catch (Exception e) { -// //Handle errors for Class.forName -// e.printStackTrace(); -// } finally { -// //finally block used to close resources -// try { -// if(stmt!=null) -// stmt.close(); -// } catch(SQLException se2) { -// } -// try { -// if(conn!=null) -// conn.close(); -// } catch(SQLException se) { -// se.printStackTrace(); -// } -// } -// System.out.println("Done"); -// } -// -// -// /** -// * Connects to a generic database. Can be used for mdbc or jdbc -// * @param DBC_DRIVER the driver for which to register (Class.forName(DBC_DRIVER)) -// * @param DB_URL the URL for the database we are testing -// * @param connectionProps -// */ -// private void connectViaDriverManager(final String DBC_DRIVER, final String DB_URL, Properties connectionProps) { -// Connection conn = null; -// Statement stmt = null; -// try { -// -// //Server server = Server.createTcpServer("-tcpAllowOthers").start(); -// //STEP 2: Register JDBC driver -// Class.forName(DBC_DRIVER); -// -// //STEP 3: Open a connection -// conn = DriverManager.getConnection(DB_URL, connectionProps); -// conn.setAutoCommit(false); -// -// //STEP 4: Execute a query -// stmt = conn.createStatement(); -// String sql; -// -// //sql = "DROP TABLE EMPLOYEE"; -// //stmt.execute(sql); -// -// sql = "CREATE TABLE IF NOT EXISTS EMPLOYEE (id INT primary key, first VARCHAR(20), last VARCHAR(20), age INT);"; -// stmt.execute(sql); -// -// sql = "INSERT INTO EMPLOYEE (id, first, last, age) VALUES (\"34\", \"Jane4\", \"Doe4\", \"40\")"; -// stmt.execute(sql); -// -// sql = "SELECT id, first, last, age FROM EMPLOYEE"; -// ResultSet rs = stmt.executeQuery(sql); -// -// //STEP 5: Extract data from result set -// while(rs.next()) { -// //Retrieve by column name -// int id = rs.getInt("id"); -// int age = rs.getInt("age"); -// String first = rs.getString("first"); -// String last = rs.getString("last"); -// -// //Display values -// //* -// System.out.print("ID: " + id); -// System.out.print(", Age: " + age); -// System.out.print(", First: " + first); -// System.out.println(", Last: " + last); -// //*/ -// -// } -// //sql = "DELETE FROM EMPLOYEE WHERE first = \"John\" and last = \"Smith\""; -// //stmt.execute(sql); -// -// //sql = "DROP TABLE IF EXISTS EMPLOYEE"; -// //stmt.execute(sql); -// -// conn.commit(); -// -// //STEP 6: Clean-up environment -// rs.close(); -// stmt.close(); -// conn.close(); -// } catch(SQLException se) { -// //Handle errors for JDBC -// se.printStackTrace(); -// } catch (Exception e) { -// //Handle errors for Class.forName -// e.printStackTrace(); -// } finally { -// //finally block used to close resources -// try { -// if(stmt!=null) -// stmt.close(); -// } catch(SQLException se2) { -// } -// try { -// if(conn!=null) -// conn.close(); -// } catch(SQLException se) { -// 
se.printStackTrace(); -// } -// } -// } -// -// -// -// /** -// * Must be mysql datasource -// * @throws Exception -// */ -// //@Test -// public void test04() throws Exception { -// String dbConnectionName = "testing"; -// String dbUserId = "alice"; -// String dbPasswd = "bob"; -// String db_url = "jdbc:mysql://localhost/EMP"; -// MysqlDataSource dataSource = new MysqlDataSource(); -// dataSource.setUser(dbUserId); -// dataSource.setPassword(dbPasswd); -// dataSource.setURL(db_url); -// -// -// Connection con = dataSource.getConnection(); -// Statement st = con.createStatement(); -// ResultSet rs = null; -// -// //FIXME CREATE EMPLOYEE TABLE -// -// if (st.execute("insert into EMPLOYEE values (\"John Doe\");")) { -// rs = st.getResultSet(); -// } -// -// rs = st.executeQuery("select * from EMPLOYEE;"); -// while (rs.next()) { -// System.out.println(rs.getString("name")); -// } -// -// if (st.execute("DELETE FROM EMPLOYEE")) { -// rs = st.getResultSet(); -// } -// rs.close(); -// st.close(); -// con.close(); -// } -// -// /** -// * Test connection to mysql datasource class -// * @throws Exception -// */ -// @Test -// public void test05() throws Exception { -// String dbConnectionName = "testing"; -// String dbUserId = "alice"; -// String dbPasswd = "bob"; -// String db_url = "jdbc:mdbc://localhost/EMP"; -// String db_type = "mysql"; -// MdbcDataSource dataSource = new MdbcDataSource(); -// dataSource.setUser(dbUserId); -// dataSource.setPassword(dbPasswd); -// dataSource.setURL(db_url); -// dataSource.setDBType(db_type); -// -// Connection con = dataSource.getConnection(); -// Statement st = con.createStatement(); -// ResultSet rs = null; -// -// if (st.execute("insert into EMPLOYEE values (\"John Doe\");")) { -// rs = st.getResultSet(); -// } -// -// rs = st.executeQuery("select * from EMPLOYEE;"); -// while (rs.next()) { -// System.out.println(rs.getString("name")); -// } -// -// if (st.execute("DELETE FROM EMPLOYEE")) { -// rs = st.getResultSet(); -// } -// rs.close(); -// st.close(); -// con.close(); -// } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/MAIN.java b/src/main/java/org/onap/music/mdbc/tests/MAIN.java deleted file mode 100755 index 160868b..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/MAIN.java +++ /dev/null @@ -1,106 +0,0 @@ -package org.onap.music.mdbc.tests; - -import java.io.FileInputStream; -import java.io.InputStream; -import java.lang.reflect.Constructor; -import java.util.ArrayList; -import java.util.List; - -import org.apache.log4j.Logger; -import org.json.JSONArray; -import org.json.JSONObject; -import org.json.JSONTokener; - -/** - * Run all the tests against all the configurations specified in /tests.json. - * - * @author Robert Eby - */ -public class MAIN { - public static final String CONFIG = "/tests.json"; - - /** - * This class runs all the tests against all the configurations specified in /tests.json. - * It assumes that a copy of Cassandra is running locally on port 9042, that a copy of H2 - * server is is running locally on port 8082, and that a copy of MySQL is running locally - * on port 3306. These can be adjusted by editing the /tests.json file. 
- * - * @param args command line arguments - * @throws Exception if anything goes wrong - */ - public static void main(String[] args) throws Exception { - new MAIN(args).run(); - System.exit(0); - } - - private JSONArray configs; - private List<Test> tests; - private int total_success, total_failure; - - public MAIN(String[] args) throws Exception { - configs = null; - tests = new ArrayList<Test>(); - total_success = total_failure = 0; - - InputStream is = null; - if (args.length == 0) { - is = this.getClass().getResourceAsStream(CONFIG); - } else { - is = new FileInputStream(args[0]); - } - if (is != null) { - JSONObject jo = new JSONObject(new JSONTokener(is)); - is.close(); - configs = jo.getJSONArray("configs"); - - JSONArray ja = jo.getJSONArray("tests"); - for (int i = 0; i < ja.length(); i++) { - Class<?> cl = Class.forName(ja.getString(i).trim()); - if (cl != null) { - Constructor<?> con = cl.getConstructor(); - tests.add((Test) con.newInstance()); - } - } - } else { - String conf = (args.length == 0) ? CONFIG : args[0]; - throw new Exception("Cannot find configuration resource: "+conf); - } - } - public void run() { - Logger logger = Logger.getLogger(this.getClass()); - for (int ix = 0; ix < configs.length(); ix++) { - JSONObject config = configs.getJSONObject(ix); - int succ = 0, fail = 0; - logger.info("*** Testing with configuration: "+config.getString("description")); - System.out.println("Testing with configuration: "+config.getString("description")); - for (Test t : tests) { - String nm = t.getName() + " ............................................................"; - System.out.print(" Test: "+nm.substring(0, 60)); - try { - List<String> msgs = t.run(config); - if (msgs == null || msgs.size() == 0) { - succ++; - System.out.println(" OK!"); - } else { - fail++; - System.out.println(" Fail!"); - System.out.flush(); - for (String m : msgs) { - System.out.println(" "+m); - } - System.out.flush(); - } - } catch (Exception x) { - fail++; - System.out.println(" Fail!"); - } - } - System.out.println(); - total_success += succ; - total_failure += fail; - } - String m = "Testing completed: "+total_success+" successful tests, "+total_failure+": failures."; - logger.info(m); - System.out.println(m); - } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/Test.java b/src/main/java/org/onap/music/mdbc/tests/Test.java deleted file mode 100755 index 67a78c8..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/Test.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.onap.music.mdbc.tests; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.List; -import java.util.Properties; - -import org.json.JSONArray; -import org.json.JSONObject; - -/** - * Provides the abstract interface for a Test, as well as some common functions. - * - * @author Robert Eby - */ -public abstract class Test { - public static final String MDBC_DRIVER = "org.onap.music.mdbc.ProxyDriver"; - - /** - * Each test derived from this class must implement this method, - * which runs the test and produces a list of error messages. - * - * @param config a JSONObject describing the configuration to use for this run of the test - * @return the list of messages. If the list is empty, the test is considered to have run - * successfully. 
- */ - abstract public List<String> run(JSONObject config); - - public String getName() { - String s = this.getClass().getName(); - return s.replaceAll("org.onap.music.mdbc.tests.", ""); - } - - public Properties buildProperties(JSONObject config, int i) { - Properties p = new Properties(); - for (String key : config.keySet()) { - if (key.equals("connections")) { - JSONArray ja = config.getJSONArray("connections"); - JSONObject connection = ja.getJSONObject(i); - for (String key2 : connection.keySet()) { - p.setProperty(key2, connection.getString(key2)); - } - } else { - p.setProperty(key, config.getString(key)); - } - } - return p; - } - - public Connection getDBConnection(Properties pr) throws SQLException, ClassNotFoundException { - Class.forName(MDBC_DRIVER); - String url = pr.getProperty("url"); - return DriverManager.getConnection(url, pr); - } - - public void assertNotNull(Object o) throws Exception { - if (o == null) - throw new Exception("Object is null"); - } - - public void assertTableContains(int connid, Connection conn, String tbl, Object... kv) throws Exception { - ResultSet rs = getRow(conn, tbl, kv); - boolean throwit = !rs.next(); - rs.close(); - if (throwit) { - throw new Exception("Conn id "+connid+" Table "+tbl+" does not have a row with "+catkeys(kv)); - } - } - public void assertTableDoesNotContain(int connid, Connection conn, String tbl, Object... kv) throws Exception { - boolean throwit = true; - try { - assertTableContains(connid, conn, tbl, kv); - } catch (Exception x) { - throwit = false; - } - if (throwit) { - throw new Exception("Conn id "+connid+" Table "+tbl+" does have a row with "+catkeys(kv)); - } - } - public ResultSet getRow(Connection conn, String tbl, Object... kv) throws SQLException { - Statement stmt = conn.createStatement(); - StringBuilder sql = new StringBuilder("SELECT * FROM ") - .append(tbl) - .append(" WHERE ") - .append(catkeys(kv)); - return stmt.executeQuery(sql.toString()); - } - public String catkeys(Object... kv) { - StringBuilder sql = new StringBuilder(); - String pfx = ""; - for (int i = 0; (i+1) < kv.length; i += 2) { - sql.append(pfx).append(kv[i]).append("="); - if (kv[i+1] instanceof String) { - sql.append("'").append(kv[i+1]).append("'"); - } else { - sql.append(kv[i+1].toString()); - } - pfx = " AND "; - } - return sql.toString(); - } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/Test_Delete.java b/src/main/java/org/onap/music/mdbc/tests/Test_Delete.java deleted file mode 100755 index 6417ab7..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/Test_Delete.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.onap.music.mdbc.tests; - -import java.sql.Connection; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.List; - -import org.json.JSONArray; -import org.json.JSONObject; - -/** - * Test that DELETEs work on the original DB, and are correctly copied to replica DBs. 
- * - * @author Robert Eby - */ -public class Test_Delete extends Test { - private final String TBL = "DELTABLE"; - - @Override - public List<String> run(JSONObject config) { - List<String> msgs = new ArrayList<String>(); - JSONArray connections = config.getJSONArray("connections"); - Connection[] conn = new Connection[connections.length()]; - Statement[] stmt = new Statement[conn.length]; - try { - for (int i = 0; i < conn.length; i++) { - conn[i] = getDBConnection(buildProperties(config, i)); - assertNotNull(conn[i]); - stmt[i] = conn[i].createStatement(); - assertNotNull(stmt[i]); - } - - try { - for (int i = 0; i < conn.length; i++) { - conn[i].setAutoCommit(true); - stmt[i].execute("CREATE TABLE IF NOT EXISTS DELTABLE(ID_ varchar(255), RANDOMTXT varchar(255), primary key (ID_))"); - } - stmt[0].execute("INSERT INTO DELTABLE(ID_, RANDOMTXT) VALUES('1', 'Everything''s Negotiable Except Cutting Medicaid')"); - stmt[0].execute("INSERT INTO DELTABLE(ID_, RANDOMTXT) VALUES('2', 'Can a Sideways Elevator Help Designers Build Taller Skyscrapers?')"); - stmt[0].execute("INSERT INTO DELTABLE(ID_, RANDOMTXT) VALUES('3', 'Can a Bernie Sanders Ally Win the Maryland Governor''s Mansion?')"); - for (int i = 0; i < conn.length; i++) { - assertTableContains(i, conn[i], TBL, "ID_", "1"); - assertTableContains(i, conn[i], TBL, "ID_", "2"); - assertTableContains(i, conn[i], TBL, "ID_", "3"); - } - - stmt[0].execute("DELETE FROM DELTABLE WHERE ID_ = '1'"); - for (int i = 0; i < conn.length; i++) { - assertTableDoesNotContain(i, conn[i], TBL, "ID_", "1"); - assertTableContains(i, conn[i], TBL, "ID_", "2"); - assertTableContains(i, conn[i], TBL, "ID_", "3"); - } - } catch (Exception e) { - msgs.add(e.toString()); - } finally { - for (int i = 0; i < stmt.length; i++) { - if (stmt[i] != null) - stmt[i].close(); - } - for (int i = 0; i < conn.length; i++) { - if (conn[i] != null) - conn[i].close(); - } - } - } catch (Exception e) { - msgs.add(e.toString()); - } - return msgs; - } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/Test_Insert.java b/src/main/java/org/onap/music/mdbc/tests/Test_Insert.java deleted file mode 100755 index 1ea0908..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/Test_Insert.java +++ /dev/null @@ -1,94 +0,0 @@ -package org.onap.music.mdbc.tests; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.List; - -import org.json.JSONArray; -import org.json.JSONObject; - -/** - * Test that INSERTs work to the original DB, and are correctly copied to replica DBs. 
- * - * @author Robert Eby - */ -public class Test_Insert extends Test { - private final String PERSON = "PERSON"; - private final String SONG = "SONG"; - - @Override - public List<String> run(JSONObject config) { - List<String> msgs = new ArrayList<String>(); - JSONArray connections = config.getJSONArray("connections"); - Connection[] conn = new Connection[connections.length()]; - Statement[] stmt = new Statement[conn.length]; - try { - for (int i = 0; i < conn.length; i++) { - conn[i] = getDBConnection(buildProperties(config, i)); - assertNotNull(conn[i]); - stmt[i] = conn[i].createStatement(); - assertNotNull(stmt[i]); - } - - try { - for (int i = 0; i < conn.length; i++) { - conn[i].setAutoCommit(true); - stmt[i].execute("CREATE TABLE IF NOT EXISTS PERSON(ID_ varchar(255), NAME varchar(255), SSN varchar(255), primary key (ID_))"); - } - stmt[0].execute("INSERT INTO PERSON(ID_, NAME, SSN) VALUES('1', 'Zaphod', '111-22-3333')"); - stmt[0].execute("INSERT INTO PERSON(ID_, NAME, SSN) VALUES('2', 'Ripley', '444-55-6666')"); - stmt[0].execute("INSERT INTO PERSON(ID_, NAME, SSN) VALUES('3', 'Spock', '777-88-9999')"); - for (int i = 0; i < conn.length; i++) { - assertTableContains(i, conn[i], PERSON, "ID_", "1"); - assertTableContains(i, conn[i], PERSON, "ID_", "2"); - assertTableContains(i, conn[i], PERSON, "ID_", "3"); - } - - stmt[0].execute("UPDATE PERSON SET NAME = 'Jabba' WHERE ID_ = '2'"); - for (int i = 0; i < conn.length; i++) { - ResultSet rs = getRow(conn[i], PERSON, "ID_", "2"); - if (rs.next()) { - String v = rs.getString("NAME"); - if (!v.equals("Jabba")) - throw new Exception("Table PERSON, row with ID_ = '2' was not updated."); - } else { - throw new Exception("Table PERSON does not have a row with ID_ = '2'"); - } - rs.close(); - } - - for (int i = 0; i < conn.length; i++) { - stmt[i].execute("CREATE TABLE IF NOT EXISTS SONG(ID_ varchar(255), PREF int, ARIA varchar(255), primary key (ID_, PREF))"); - } - stmt[0].execute("INSERT INTO SONG(ID_, PREF, ARIA) VALUES('1', 1, 'Nessun Dorma')"); - stmt[0].execute("INSERT INTO SONG(ID_, PREF, ARIA) VALUES('2', 5, 'O mio Bambino Caro')"); - stmt[0].execute("INSERT INTO SONG(ID_, PREF, ARIA) VALUES('2', 2, 'Sweet Georgia Brown')"); - stmt[0].execute("INSERT INTO SONG(ID_, PREF, ARIA) VALUES('3', 77, 'Mud Flats Blues')"); - stmt[0].execute("INSERT INTO SONG(ID_, PREF, ARIA) VALUES('3', 69, 'Me & Mr Jones')"); - for (int i = 0; i < conn.length; i++) { - assertTableContains(i, conn[i], SONG, "ID_", "1", "PREF", 1); - assertTableContains(i, conn[i], SONG, "ID_", "2", "PREF", 5); - assertTableContains(i, conn[i], SONG, "ID_", "2", "PREF", 2); - assertTableContains(i, conn[i], SONG, "ID_", "3", "PREF", 77); - assertTableContains(i, conn[i], SONG, "ID_", "3", "PREF", 69); - } - } catch (Exception e) { - msgs.add(e.toString()); - } finally { - for (int i = 0; i < stmt.length; i++) { - if (stmt[i] != null) - stmt[i].close(); - } - for (int i = 0; i < conn.length; i++) { - if (conn[i] != null) - conn[i].close(); - } - } - } catch (Exception e) { - msgs.add(e.toString()); - } - return msgs; - } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/Test_Transactions.java b/src/main/java/org/onap/music/mdbc/tests/Test_Transactions.java deleted file mode 100755 index 787f1f5..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/Test_Transactions.java +++ /dev/null @@ -1,74 +0,0 @@ -package org.onap.music.mdbc.tests; - -import java.sql.Connection; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.List; - -import 
org.json.JSONArray; -import org.json.JSONObject; - -/** - * Test that transactions work between the original DB, and replica DBs. - * - * @author Robert Eby - */ -public class Test_Transactions extends Test { - private final String TBL = "TRANSTEST"; - - @Override - public List<String> run(JSONObject config) { - List<String> msgs = new ArrayList<String>(); - JSONArray connections = config.getJSONArray("connections"); - Connection[] conn = new Connection[connections.length()]; - Statement[] stmt = new Statement[conn.length]; - try { - for (int i = 0; i < conn.length; i++) { - conn[i] = getDBConnection(buildProperties(config, i)); - assertNotNull(conn[i]); - stmt[i] = conn[i].createStatement(); - assertNotNull(stmt[i]); - } - - try { - for (int i = 0; i < conn.length; i++) { - conn[i].setAutoCommit(true); - stmt[i].execute("CREATE TABLE IF NOT EXISTS TRANSTEST(ID_ varchar(12), STUFF varchar(255), primary key (ID_))"); - conn[i].setAutoCommit(false); - } - stmt[0].execute("INSERT INTO TRANSTEST(ID_, STUFF) VALUES('1', 'CenturyLink Now Under Fire on All Sides For Fraudulent Billing')"); - stmt[0].execute("INSERT INTO TRANSTEST(ID_, STUFF) VALUES('2', 'Netflix Now in Half of All Broadband Households, Study Says')"); - stmt[0].execute("INSERT INTO TRANSTEST(ID_, STUFF) VALUES('3', 'Private Data Of 6 Million Verizon Customers Exposed')"); - assertTableContains(0, conn[0], TBL, "ID_", "1"); - assertTableContains(0, conn[0], TBL, "ID_", "2"); - assertTableContains(0, conn[0], TBL, "ID_", "3"); - for (int i = 1; i < conn.length; i++) { - assertTableDoesNotContain(i, conn[i], TBL, "ID_", "1"); - assertTableDoesNotContain(i, conn[i], TBL, "ID_", "2"); - assertTableDoesNotContain(i, conn[i], TBL, "ID_", "3"); - } - conn[0].commit(); - for (int i = 0; i < conn.length; i++) { - assertTableContains(i, conn[i], TBL, "ID_", "1"); - assertTableContains(i, conn[i], TBL, "ID_", "2"); - assertTableContains(i, conn[i], TBL, "ID_", "3"); - } - - } catch (Exception e) { - msgs.add(e.toString()); - } finally { - for (int i = 0; i < stmt.length; i++) { - if (stmt[i] != null) - stmt[i].close(); - } - for (int i = 0; i < conn.length; i++) { - if (conn[i] != null) - conn[i].close(); - } - } - } catch (Exception e) { - msgs.add(e.toString()); - } - return msgs; - } -} diff --git a/src/main/java/org/onap/music/mdbc/tests/package-info.java b/src/main/java/org/onap/music/mdbc/tests/package-info.java deleted file mode 100755 index 7e0b84d..0000000 --- a/src/main/java/org/onap/music/mdbc/tests/package-info.java +++ /dev/null @@ -1,165 +0,0 @@ -/** - * <p> - * This package provides a testing harness to test the various features of MDBC against - * multiple combinations of database and MUSIC mixins. The configurations (consisting of - * database information and mixin combinations) to test, as well as the specific tests to - * run are all defined in the configuration file <code>test.json</code>. - * </p> - * <p> - * To run the tests against all the configurations specified in /tests.json, do the following: - * </p> - * <pre> - * java org.onap.music.mdbc.tests.MAIN [ configfile ] - * </pre> - * <p> - * It is assumed that a copy of Cassandra is running locally on port 9042, - * that a copy of H2 server is is running locally on port 8082, - * and that a copy of MySQL (or MariaDB) is running locally on port 3306. - * These can be adjusted by editing the /tests.json file. - * </p> - * <p> - * When building a copy of MDBC for production use, this package can be safely removed. 
- * </p> - * <p> - * The initial copy of <i>tests.json</i> is as follows: - * </p> - * <pre> - * { - * "tests": [ - * "org.onap.music.mdbc.tests.Test_Insert", - * "org.onap.music.mdbc.tests.Test_Delete", - * "org.onap.music.mdbc.tests.Test_Transactions" - * ], - * "configs": [ - * { - * "description": "H2 with Cassandra with two connections", - * "MDBC_DB_MIXIN": "h2", - * "MDBC_MUSIC_MIXIN": "cassandra", - * "replicas": "0,1", - * "music_keyspace": "mdbctest1", - * "music_address": "localhost", - * "music_rfactor": "1", - * "connections": [ - * { - * "name": "Connection 0", - * "url": "jdbc:mdbc:mem:db0", - * "user": "", - * "password": "", - * "myid": "0" - * }, - * { - * "name": "Connection 1", - * "url": "jdbc:mdbc:mem:db1", - * "user": "", - * "password": "", - * "myid": "1" - * } - * ] - * }, - * { - * "description": "H2 with Cassandra2 with three connections", - * "MDBC_DB_MIXIN": "h2", - * "MDBC_MUSIC_MIXIN": "cassandra2", - * "replicas": "0,1,2", - * "music_keyspace": "mdbctest2", - * "music_address": "localhost", - * "music_rfactor": "1", - * "user": "", - * "password": "", - * "connections": [ - * { - * "name": "Connection 0", - * "url": "jdbc:mdbc:mem:db0", - * "myid": "0" - * }, - * { - * "name": "Connection 1", - * "url": "jdbc:mdbc:mem:db1", - * "myid": "1" - * }, - * { - * "name": "Connection 2", - * "url": "jdbc:mdbc:mem:db2", - * "myid": "2" - * } - * ] - * }, - * { - * "description": "H2 Server with Cassandra2 with two connections", - * "MDBC_DB_MIXIN": "h2server", - * "MDBC_MUSIC_MIXIN": "cassandra2", - * "replicas": "0,1", - * "music_keyspace": "mdbctest3", - * "music_address": "localhost", - * "music_rfactor": "1", - * "connections": [ - * { - * "name": "Connection 0", - * "url": "jdbc:mdbc:tcp://localhost/mdbc0", - * "user": "", - * "password": "", - * "myid": "0" - * }, - * { - * "name": "Connection 1", - * "url": "jdbc:mdbc:tcp://localhost/mdbc1", - * "user": "", - * "password": "", - * "myid": "1" - * } - * ] - * }, - * { - * "description": "MySQL with Cassandra2 with two connections", - * "MDBC_DB_MIXIN": "mysql", - * "MDBC_MUSIC_MIXIN": "cassandra2", - * "replicas": "0,1,2", - * "music_keyspace": "mdbctest4", - * "music_address": "localhost", - * "music_rfactor": "1", - * "user": "root", - * "password": "abc123", - * "connections": [ - * { - * "name": "Connection 0", - * "url": "jdbc:mdbc://127.0.0.1:3306/mdbc", - * "myid": "0" - * }, - * { - * "name": "Connection 1", - * "url": "jdbc:mdbc://127.0.0.1:3306/mdbc2", - * "myid": "1" - * } - * ] - * }, - * { - * "description": "H2 (DB #1) and MySQL (DB #2) with Cassandra2", - * "MDBC_MUSIC_MIXIN": "cassandra2", - * "replicas": "0,1", - * "music_keyspace": "mdbctest5", - * "music_address": "localhost", - * "music_rfactor": "1", - * "connections": [ - * { - * "name": "Connection 0", - * "MDBC_DB_MIXIN": "h2", - * "url": "jdbc:mdbc:mem:db9", - * "user": "", - * "password": "", - * "myid": "0" - * }, - * { - * "name": "Connection 1", - * "MDBC_DB_MIXIN": "mysql", - * "url": "jdbc:mdbc://127.0.0.1:3306/mdbc3", - * "user": "root", - * "password": "abc123", - * "myid": "1" - * } - * ] - * } - * ] - * } - * </pre> - */ -package org.onap.music.mdbc.tests; diff --git a/src/main/java/org/onap/music/mdbc/tools/CreateNodeConfigurations.java b/src/main/java/org/onap/music/mdbc/tools/CreateNodeConfigurations.java deleted file mode 100644 index c4ebf46..0000000 --- a/src/main/java/org/onap/music/mdbc/tools/CreateNodeConfigurations.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.onap.music.mdbc.tools; - -import 
org.onap.music.exceptions.MDBCServiceException; -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.configurations.NodeConfiguration; -import org.onap.music.mdbc.configurations.TablesConfiguration; -import com.beust.jcommander.JCommander; -import com.beust.jcommander.Parameter; - -import java.io.FileNotFoundException; -import java.util.List; - -public class CreateNodeConfigurations { - public static final EELFLoggerDelegate LOG = EELFLoggerDelegate.getLogger(CreateNodeConfigurations.class); - - private String tables; - @Parameter(names = { "-t", "--table-configurations" }, required = true, - description = "This is the input file that is going to have the configuration for all the tables and partitions") - private String tableConfigurationsFile; - @Parameter(names = { "-b", "--basename" }, required = true, - description = "This base name for all the outputs files that are going to be created") - private String basename; - @Parameter(names = { "-o", "--output-dir" }, required = true, - description = "This is the output directory that is going to contain all the configuration file to be generated") - private String outputDirectory; - @Parameter(names = { "-h", "-help", "--help" }, help = true, - description = "Print the help message") - private boolean help = false; - - private TablesConfiguration inputConfig; - - public CreateNodeConfigurations(){} - - - public void readInput(){ - try { - inputConfig = TablesConfiguration.readJsonFromFile(tableConfigurationsFile); - } catch (FileNotFoundException e) { - LOG.error("Input file is invalid or not found"); - System.exit(1); - } - } - - public void createAndSaveNodeConfigurations(){ - List<NodeConfiguration> nodes = null; - try { - nodes = inputConfig.initializeAndCreateNodeConfigurations(); - } catch (MDBCServiceException e) { - e.printStackTrace(); - } - int counter = 0; - for(NodeConfiguration nodeConfig : nodes){ - String name = (nodeConfig.nodeName==null||nodeConfig.nodeName.isEmpty())?Integer.toString(counter++): nodeConfig.nodeName; - nodeConfig.saveToFile(outputDirectory+"/"+basename+"-"+name+".json"); - } - } - - public static void main(String[] args) { - CreateNodeConfigurations configs = new CreateNodeConfigurations(); - @SuppressWarnings("deprecation") - JCommander jc = new JCommander(configs, args); - if (configs.help) { - jc.usage(); - System.exit(1); - return; - } - configs.readInput(); - configs.createAndSaveNodeConfigurations(); - } -} diff --git a/src/main/java/org/onap/music/mdbc/tools/CreatePartition.java b/src/main/java/org/onap/music/mdbc/tools/CreatePartition.java deleted file mode 100644 index afd1a47..0000000 --- a/src/main/java/org/onap/music/mdbc/tools/CreatePartition.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.onap.music.mdbc.tools; - -import org.onap.music.logging.EELFLoggerDelegate; -import org.onap.music.mdbc.configurations.NodeConfiguration; -import com.beust.jcommander.JCommander; -import com.beust.jcommander.Parameter; - -import java.util.UUID; - -public class CreatePartition { - public static final EELFLoggerDelegate LOG = EELFLoggerDelegate.getLogger(CreatePartition.class); - - @Parameter(names = { "-t", "--tables" }, required = true, - description = "This is the tables that are assigned to this ") - private String tables; - @Parameter(names = { "-f", "--file" }, required = true, - description = "This is the output file that is going to have the configuration for the ranges") - private String file; - @Parameter(names = { "-i", "--mri-index" }, required = true, - description = 
"Index in the Mri Table") - private String mriIndex; - @Parameter(names = { "-m", "--mri-table-name" }, required = true, - description = "Mri Table name") - private String mriTable; - @Parameter(names = { "-r", "--music-tx-digest-table-name" }, required = true, - description = "Music Transaction Digest Table name") - private String mtxdTable; - @Parameter(names = { "-h", "-help", "--help" }, help = true, - description = "Print the help message") - private boolean help = false; - - NodeConfiguration config; - - public CreatePartition(){ - } - - public void convert(){ - config = new NodeConfiguration(tables, UUID.fromString(mriIndex),mriTable,"test","", mtxdTable); - } - - public void saveToFile(){ - config.saveToFile(file); - } - - public static void main(String[] args) { - - CreatePartition newPartition = new CreatePartition(); - @SuppressWarnings("deprecation") - JCommander jc = new JCommander(newPartition, args); - if (newPartition.help) { - jc.usage(); - System.exit(1); - return; - } - newPartition.convert(); - newPartition.saveToFile(); - } -} diff --git a/src/main/javadoc/overview.html b/src/main/javadoc/overview.html deleted file mode 100755 index 0be8c38..0000000 --- a/src/main/javadoc/overview.html +++ /dev/null @@ -1,37 +0,0 @@ -<!DOCTYPE HTML> -<html> -<body> - -<p> -MDBC is a JDBC driver that provides an application programmer a way to automatically copy -data to and from a running copy of MUSIC/Cassandra. -The driver does this by intercepting all DELETES, INSERTS, SELECTS, and UPDATES and copying -modified table rows to and from Cassandra. -</p> -<p> -MDBC can automatically adopt to the database in use, as well as the style of MUSIC interaction, -by the use of "mixins". Each JDBC Connection via MDBC specifies two mixins to use: -</p> -<ul> -<li>a Mixin that conforms to the {@link org.onap.music.mdbc.mixins.MusicInterface} specification -for communicating with MUSIC/Cassandra.</li> -<li>a Mixin that conforms to the {@link org.onap.music.mdbc.mixins.DBInterface} specification -for communicating with the underlying database in use.</li> -</ul> -<p> -More details are provided on the package pages for -<code>org.onap.music.mdbc</code> and -<code>org.onap.music.mdbc.mixins</code>. -</p> -<h2>Limitations</h2> -There are several limitations to the use of MDBC: -<ul> -<li>The <code>java.sql.Statement.executeBatch()</code> method is not supported by MDBC. -It is not prohibited either; your results will be unpredictable.</li> -<li>When used with a DB server, there is some delay as dirty row information is copied -from a table in the database, to the dirty table in Cassandra. 
This opens a window -during which all sorts of mischief may occur.</li> -</ul> - -</body> -</html> diff --git a/src/main/resources/META-INF/services/java.sql.Driver b/src/main/resources/META-INF/services/java.sql.Driver deleted file mode 100755 index a135284..0000000 --- a/src/main/resources/META-INF/services/java.sql.Driver +++ /dev/null @@ -1 +0,0 @@ -org.onap.music.mdbc.ProxyDriver diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties deleted file mode 100755 index 88ee415..0000000 --- a/src/main/resources/log4j.properties +++ /dev/null @@ -1,14 +0,0 @@ -log4j.rootLogger=DEBUG, stdout - -# Direct log messages to stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n - -#log4j.appender.filelog=org.apache.log4j.DailyRollingFileAppender -#log4j.appender.filelog.file=/tmp/log -#log4j.appender.filelog.datePattern='.'yyyyMMdd -#log4j.appender.filelog.append=true -#log4j.appender.filelog.layout=org.apache.log4j.PatternLayout -#log4j.appender.filelog.layout.ConversionPattern=%d %-5p [%t] - %m%n diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml deleted file mode 100644 index df02405..0000000 --- a/src/main/resources/logback.xml +++ /dev/null @@ -1,370 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - ============LICENSE_START========================================== - mdbc - =================================================================== - Copyright © 2017 AT&T Intellectual Property. All rights reserved. - =================================================================== - - Unless otherwise specified, all software contained herein is licensed - under the Apache License, Version 2.0 (the “License”); - you may not use this software except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Unless otherwise specified, all documentation contained herein is licensed - under the Creative Commons License, Attribution 4.0 Intl. (the “License”); - you may not use this documentation except in compliance with the License. - You may obtain a copy of the License at - - https://creativecommons.org/licenses/by/4.0/ - - Unless required by applicable law or agreed to in writing, documentation - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- - ============LICENSE_END============================================ - - - --> - -<configuration scan="true" scanPeriod="3 seconds" debug="true"> - <!-- - Logback files for the mdbc Driver "mdbc" - are created in directory ${catalina.base}/logs/mdbc; - e.g., apache-tomcat-8.0.35/logs/mdbc/application.log - --> - <!--<jmxConfigurator /> --> - - <!-- specify the component name --> - <property name="componentName" value="mdbc"></property> - - <!-- specify the base path of the log directory --> - <property name="logDirPrefix" value="${catalina.base}/logs"></property> - - <!-- The directories where logs are written --> - <property name="logDirectory" value="${logDirPrefix}/${componentName}" /> - <!-- Can easily relocate debug logs by modifying this path. --> - <property name="debugLogDirectory" value="${logDirPrefix}/${componentName}" /> - - <!-- log file names --> - <property name="generalLogName" value="application" /> - <property name="errorLogName" value="error" /> - <property name="metricsLogName" value="metrics" /> - <property name="auditLogName" value="audit" /> - <property name="debugLogName" value="debug" /> - <!-- - These loggers are not used in code (yet). - <property name="securityLogName" value="security" /> - <property name="policyLogName" value="policy" /> - <property name="performanceLogName" value="performance" /> - <property name="serverLogName" value="server" /> - --> - - <!-- 1610 Logging Fields Format Revisions --> - <property name="auditLoggerPattern" - value="%X{AuditLogBeginTimestamp}|%X{AuditLogEndTimestamp}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{VirtualServerName}|%X{ServiceName}|%X{PartnerName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDescription}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{Timer}|%X{ServerFQDN}|%X{ClientIPAddress}|%X{ClassName}|%X{Unused}|%X{ProcessKey}|%X{CustomField1}|%X{CustomField2}|%X{CustomField3}|%X{CustomField4}| %msg%n" /> - - <property name="metricsLoggerPattern" - value="%X{MetricsLogBeginTimestamp}|%X{MetricsLogEndTimestamp}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{VirtualServerName}|%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDescription}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{Timer}|%X{ServerFQDN}|%X{ClientIPAddress}|%X{ClassName}|%X{Unused}|%X{ProcessKey}|%X{TargetVisualEntity}|%X{CustomField1}|%X{CustomField2}|%X{CustomField3}|%X{CustomField4}| %msg%n" /> - - <property name="errorLoggerPattern" - value="%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%X{RequestId}|%thread|%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%X{ClassName}|%X{AlertSeverity}|%X{ErrorCode}|%X{ErrorDescription}| %msg%n" /> - - <property name="defaultLoggerPattern" - value="%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%X{RequestId}|%thread|%X{ClassName}| %msg%n" /> - - <!-- use %class so library logging calls yield their class name --> - <property name="applicationLoggerPattern" - value="%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%X{RequestId}|%thread|%class{36}| %msg%n" /> - - <!-- Example evaluator filter applied against console appender --> - <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> - <encoder> - <pattern>${defaultLoggerPattern}</pattern> - </encoder> - </appender> - - <!-- ============================================================================ --> - <!-- EELF Appenders --> - <!-- ============================================================================ --> - - <!-- The EELFAppender is 
used to record events to the general application - log --> - - - <appender name="EELF" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${generalLogName}.log</file> - <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <!-- daily rollover --> - <fileNamePattern>${logDirectory}/${generalLogName}.%d{yyyy-MM-dd}.log.zip</fileNamePattern> - - <!-- keep 30 days' worth of history capped at 3GB total size --> - <maxHistory>30</maxHistory> - <totalSizeCap>3GB</totalSizeCap> - - </rollingPolicy> - <encoder> - <pattern>${applicationLoggerPattern}</pattern> - </encoder> - </appender> - - <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <!-- Class name is part of caller data --> - <includeCallerData>true</includeCallerData> - <appender-ref ref="EELF" /> - </appender> - - <!-- EELF Security Appender. This appender is used to record security events - to the security log file. Security events are separate from other loggers - in EELF so that security log records can be captured and managed in a secure - way separate from the other logs. This appender is set to never discard any - events. --> - <!-- - <appender name="EELFSecurity" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${securityLogName}.log</file> - <rollingPolicy - class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>${logDirectory}/${securityLogName}.%i.log.zip - </fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>9</maxIndex> - </rollingPolicy> - <triggeringPolicy - class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>5MB</maxFileSize> - </triggeringPolicy> - <encoder> - <pattern>${defaultPattern}</pattern> - </encoder> - </appender> - - <appender name="asyncEELFSecurity" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <discardingThreshold>0</discardingThreshold> - <appender-ref ref="EELFSecurity" /> - </appender> - --> - - <!-- EELF Performance Appender. This appender is used to record performance - records. --> - <!-- - <appender name="EELFPerformance" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${performanceLogName}.log</file> - <rollingPolicy - class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>${logDirectory}/${performanceLogName}.%i.log.zip - </fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>9</maxIndex> - </rollingPolicy> - <triggeringPolicy - class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>5MB</maxFileSize> - </triggeringPolicy> - <encoder> - <outputPatternAsHeader>true</outputPatternAsHeader> - <pattern>${defaultPattern}</pattern> - </encoder> - </appender> - <appender name="asyncEELFPerformance" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFPerformance" /> - </appender> - --> - - <!-- EELF Server Appender. This appender is used to record Server related - logging events. The Server logger and appender are specializations of the - EELF application root logger and appender. This can be used to segregate Server - events from other components, or it can be eliminated to record these events - as part of the application root log. 
--> - <!-- - <appender name="EELFServer" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${serverLogName}.log</file> - <rollingPolicy - class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>${logDirectory}/${serverLogName}.%i.log.zip - </fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>9</maxIndex> - </rollingPolicy> - <triggeringPolicy - class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>5MB</maxFileSize> - </triggeringPolicy> - <encoder> - <pattern>${defaultPattern}</pattern> - </encoder> - </appender> - <appender name="asyncEELFServer" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFServer" /> - </appender> - --> - - <!-- EELF Policy Appender. This appender is used to record Policy engine - related logging events. The Policy logger and appender are specializations - of the EELF application root logger and appender. This can be used to segregate - Policy engine events from other components, or it can be eliminated to record - these events as part of the application root log. --> - <!-- - <appender name="EELFPolicy" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${policyLogName}.log</file> - <rollingPolicy - class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>${logDirectory}/${policyLogName}.%i.log.zip - </fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>9</maxIndex> - </rollingPolicy> - <triggeringPolicy - class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>5MB</maxFileSize> - </triggeringPolicy> - <encoder> - <pattern>${defaultPattern}</pattern> - </encoder> - </appender> - <appender name="asyncEELFPolicy" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFPolicy" /> - </appender> - --> - - <!-- EELF Audit Appender. This appender is used to record audit engine - related logging events. The audit logger and appender are specializations - of the EELF application root logger and appender. This can be used to segregate - Policy engine events from other components, or it can be eliminated to record - these events as part of the application root log. 
--> - - <appender name="EELFAudit" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${auditLogName}.log</file> - <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <!-- daily rollover --> - <fileNamePattern>${logDirectory}/${auditLogName}.%d{yyyy-MM-dd}.log.zip</fileNamePattern> - - <!-- keep 30 days' worth of history capped at 3GB total size --> - <maxHistory>30</maxHistory> - <totalSizeCap>3GB</totalSizeCap> - - </rollingPolicy> - <encoder> - <pattern>${auditLoggerPattern}</pattern> - </encoder> - </appender> - <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFAudit" /> - </appender> - - <appender name="EELFMetrics" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${metricsLogName}.log</file> - <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <!-- daily rollover --> - <fileNamePattern>${logDirectory}/${metricsLogName}.%d{yyyy-MM-dd}.log.zip</fileNamePattern> - - <!-- keep 30 days' worth of history capped at 3GB total size --> - <maxHistory>30</maxHistory> - <totalSizeCap>3GB</totalSizeCap> - - </rollingPolicy> - <encoder> - <pattern>${metricsLoggerPattern}</pattern> - </encoder> - </appender> - - - <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFMetrics"/> - </appender> - - <appender name="EELFError" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${logDirectory}/${errorLogName}.log</file> - <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <!-- daily rollover --> - <fileNamePattern>${logDirectory}/${errorLogName}.%d{yyyy-MM-dd}.log.zip</fileNamePattern> - - <!-- keep 30 days' worth of history capped at 3GB total size --> - <maxHistory>30</maxHistory> - <totalSizeCap>3GB</totalSizeCap> - - </rollingPolicy> - <encoder> - <pattern>${errorLoggerPattern}</pattern> - </encoder> - </appender> - - <appender name="asyncEELFError" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFError"/> - </appender> - - <appender name="EELFDebug" - class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>${debugLogDirectory}/${debugLogName}.log</file> - <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <!-- daily rollover --> - <fileNamePattern>${logDirectory}/${debugLogName}.%d{yyyy-MM-dd}.log.zip</fileNamePattern> - - <!-- keep 30 days' worth of history capped at 3GB total size --> - <maxHistory>30</maxHistory> - <totalSizeCap>3GB</totalSizeCap> - - </rollingPolicy> - <encoder> - <pattern>${defaultLoggerPattern}</pattern> - </encoder> - </appender> - - <appender name="asyncEELFDebug" class="ch.qos.logback.classic.AsyncAppender"> - <queueSize>256</queueSize> - <appender-ref ref="EELFDebug" /> - </appender> - - - - - <logger name="com.att.eelf" level="info" additivity="false"> - <appender-ref ref="asyncEELF" /> - </logger> - - <logger name="com.att.eelf" level="info" additivity="false"> - <appender-ref ref="asyncEELFAudit" /> - </logger> - - <logger name="com.att.eelf" level="debug" additivity="false"> - <appender-ref ref="asyncEELFDebug" /> - </logger> - - <logger name="com.att.eelf.error" level="info" additivity="false"> - <appender-ref ref="asyncEELFError" /> - </logger> - - <logger name="com.att.eelf.metrics" level="info" additivity="false"> - <appender-ref ref="asyncEELFMetrics" /> - 
</logger> - - <root level="DEBUG"> - <appender-ref ref="asyncEELF" /> - </root> - -</configuration> diff --git a/src/main/resources/mdbc.properties b/src/main/resources/mdbc.properties deleted file mode 100755 index 3e207aa..0000000 --- a/src/main/resources/mdbc.properties +++ /dev/null @@ -1,12 +0,0 @@ -# -# A list of all Mixins that should be checked by MDBC -# -MIXINS= \ - org.onap.music.mdbc.mixins.H2Mixin \ - org.onap.music.mdbc.mixins.H2ServerMixin \ - org.onap.music.mdbc.mixins.MySQLMixin \ - org.onap.music.mdbc.mixins.CassandraMixin \ - org.onap.music.mdbc.mixins.Cassandra2Mixin - -critical.tables= \ - TEST
\ No newline at end of file diff --git a/src/main/resources/mdbc_driver.properties b/src/main/resources/mdbc_driver.properties deleted file mode 100644 index 487feb3..0000000 --- a/src/main/resources/mdbc_driver.properties +++ /dev/null @@ -1,13 +0,0 @@ -# -# A list of all Mixins that should be checked by MDBC -# -MIXINS= \ - org.onap.music.mdbc.mixins.H2Mixin \ - org.onap.music.mdbc.mixins.H2ServerMixin \ - org.onap.music.mdbc.mixins.MySQLMixin \ - org.onap.music.mdbc.mixins.CassandraMixin \ - org.onap.music.mdbc.mixins.Cassandra2Mixin - -DEFAULT_DRIVERS=\ - org.h2.Driver \ - com.mysql.jdbc.Driver
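The deleted mdbc_driver.properties names the mixins to probe and, under DEFAULT_DRIVERS, the underlying JDBC drivers (H2 and MySQL) that must be on the classpath before the proxy driver can delegate to them. A minimal sketch of pre-loading those drivers from this file is shown below; the DriverPreloader class and its method are hypothetical illustrations, not the actual MDBC loader.

import java.io.InputStream;
import java.util.Properties;

public class DriverPreloader {
    // Reads /mdbc_driver.properties from the classpath and force-loads every
    // class named in DEFAULT_DRIVERS so that java.sql.DriverManager knows about them.
    public static void preloadDefaultDrivers() {
        try (InputStream in =
                DriverPreloader.class.getResourceAsStream("/mdbc_driver.properties")) {
            if (in == null) {
                System.err.println("mdbc_driver.properties not found on classpath");
                return;
            }
            Properties props = new Properties();
            props.load(in);
            String drivers = props.getProperty("DEFAULT_DRIVERS", "");
            for (String driverClass : drivers.split("\\s+")) {
                if (driverClass.isEmpty()) {
                    continue;
                }
                try {
                    Class.forName(driverClass);  // e.g. org.h2.Driver, com.mysql.jdbc.Driver
                } catch (ClassNotFoundException e) {
                    System.err.println("Driver not on classpath: " + driverClass);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Because of the backslash continuations in the properties file, DEFAULT_DRIVERS loads as a single whitespace-separated string, which is why the sketch splits on whitespace.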
\ No newline at end of file diff --git a/src/main/resources/music.properties b/src/main/resources/music.properties deleted file mode 100644 index 204e3f6..0000000 --- a/src/main/resources/music.properties +++ /dev/null @@ -1,8 +0,0 @@ -cassandra.host =\ - 143.215.128.49 -cassandra.user =\ - cassandra -cassandra.password =\ - cassandra -zookeeper.host =\ - localhost
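The deleted music.properties supplies the Cassandra contact point and credentials (plus a zookeeper host). As a hedged sketch only, assuming the DataStax 3.x driver family that the removed classes already import (com.datastax.driver.core.*), reading those keys and opening a session could look like the following; the class name and fallback defaults are illustrative, not part of the removed code.

import java.io.InputStream;
import java.util.Properties;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;

public class MusicPropertiesExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try (InputStream in =
                MusicPropertiesExample.class.getResourceAsStream("/music.properties")) {
            if (in == null) {
                throw new IllegalStateException("music.properties not found on classpath");
            }
            props.load(in);
        }
        String host = props.getProperty("cassandra.host", "localhost").trim();
        String user = props.getProperty("cassandra.user", "cassandra").trim();
        String pass = props.getProperty("cassandra.password", "cassandra").trim();

        // Cluster and Session are both Closeable in the 3.x driver.
        try (Cluster cluster = Cluster.builder()
                .addContactPoint(host)
                .withCredentials(user, pass)
                .build();
             Session session = cluster.connect()) {
            System.out.println("Connected to cluster: "
                    + cluster.getMetadata().getClusterName());
        }
    }
}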
\ No newline at end of file diff --git a/src/main/resources/tests.json b/src/main/resources/tests.json deleted file mode 100755 index 8df9f34..0000000 --- a/src/main/resources/tests.json +++ /dev/null @@ -1,163 +0,0 @@ -{ - "tests": [ - "org.onap.music.mdbc.tests.Test_Insert", - "org.onap.music.mdbc.tests.Test_Delete", - "org.onap.music.mdbc.tests.Test_Transactions" - ], - "configs": [ - { - "description": "H2 with Cassandra with two connections", - "MDBC_DB_MIXIN": "h2", - "MDBC_MUSIC_MIXIN": "cassandra", - "replicas": "0,1", - "music_keyspace": "mdbctest1", - "music_address": "localhost", - "music_rfactor": "1", - "connections": [ - { - "name": "Connection 0", - "url": "jdbc:mdbc:mem:db0", - "user": "", - "password": "", - "myid": "0" - }, - { - "name": "Connection 1", - "url": "jdbc:mdbc:mem:db1", - "user": "", - "password": "", - "myid": "1" - } - ] - }, - { - "description": "H2 with Cassandra2 with three connections", - "MDBC_DB_MIXIN": "h2", - "MDBC_MUSIC_MIXIN": "cassandra2", - "replicas": "0,1,2", - "music_keyspace": "mdbctest2", - "music_address": "localhost", - "music_rfactor": "1", - "user": "", - "password": "", - "connections": [ - { - "name": "Connection 0", - "url": "jdbc:mdbc:mem:db0", - "myid": "0" - }, - { - "name": "Connection 1", - "url": "jdbc:mdbc:mem:db1", - "myid": "1" - }, - { - "name": "Connection 2", - "url": "jdbc:mdbc:mem:db2", - "myid": "2" - } - ] - }, - { - "description": "H2 Server with Cassandra2 with two connections", - "MDBC_DB_MIXIN": "h2server", - "MDBC_MUSIC_MIXIN": "cassandra2", - "replicas": "0,1", - "music_keyspace": "mdbctest3", - "music_address": "localhost", - "music_rfactor": "1", - "connections": [ - { - "name": "Connection 0", - "url": "jdbc:mdbc:tcp://localhost/mdbc0", - "user": "", - "password": "", - "myid": "0" - }, - { - "name": "Connection 1", - "url": "jdbc:mdbc:tcp://localhost/mdbc1", - "user": "", - "password": "", - "myid": "1" - } - ] - }, - { - "description": "MySQL with Cassandra2 with two connections", - "MDBC_DB_MIXIN": "mysql", - "MDBC_MUSIC_MIXIN": "cassandra2", - "replicas": "0,1,2", - "music_keyspace": "mdbctest4", - "music_address": "localhost", - "music_rfactor": "1", - "user": "root", - "password": "abc123", - "connections": [ - { - "name": "Connection 0", - "url": "jdbc:mdbc://127.0.0.1:3306/mdbc", - "myid": "0" - }, - { - "name": "Connection 1", - "url": "jdbc:mdbc://127.0.0.1:3306/mdbc2", - "myid": "1" - } - ] - }, - { - "description": "H2 (DB #1) and MySQL (DB #2) with Cassandra2", - "MDBC_MUSIC_MIXIN": "cassandra2", - "replicas": "0,1", - "music_keyspace": "mdbctest5", - "music_address": "localhost", - "music_rfactor": "1", - "connections": [ - { - "name": "Connection 0", - "MDBC_DB_MIXIN": "h2", - "url": "jdbc:mdbc:mem:db9", - "user": "", - "password": "", - "myid": "0" - }, - { - "name": "Connection 1", - "MDBC_DB_MIXIN": "mysql", - "url": "jdbc:mdbc://127.0.0.1:3306/mdbc3", - "user": "root", - "password": "abc123", - "myid": "1" - } - ] - } - ], - "zconfigs": [ - { - "description": "H2 Server with Cassandra2 with two connections", - "MDBC_DB_MIXIN": "h2server", - "MDBC_MUSIC_MIXIN": "cassandra2", - "replicas": "0,1", - "music_keyspace": "mdbctest3", - "music_address": "localhost", - "music_rfactor": "1", - "connections": [ - { - "name": "Connection 0", - "url": "jdbc:mdbc:tcp://localhost/mdbc0", - "user": "", - "password": "", - "myid": "0" - }, - { - "name": "Connection 1", - "url": "jdbc:mdbc:tcp://localhost/mdbc1", - "user": "", - "password": "", - "myid": "1" - } - ] - } - ] -} diff --git 
a/src/main/shell/mk_jboss_module b/src/main/shell/mk_jboss_module deleted file mode 100755 index 7bbb8d9..0000000 --- a/src/main/shell/mk_jboss_module +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash -# -# mk_jboss_module -- This script builds a JBoss module for MDBC. It should be run directly -# in the directory containing the MDBC code. It will build the MDBC jar file (under -# target/mdbc-jboss-module.tar), and then construct a tar file containing MDBC and all of -# its dependencies, as well as other files needed for a JBoss module. -# -# To install the module: untar the tar file on the server in the JBOSS_DIR/modules directory. -# - -if [ ! -f pom.xml ] -then - echo mk_jboss_module: Where is pom.xml? - exit 1 -fi - -mvn -Dmaven.test.skip=true package -if [ $? != 0 ] -then - echo mk_jboss_module: maven failed... - exit 1 -fi - -T=/tmp/mk_jboss_module$$ -T2=$T/org/onap/music/mdbc/main -MODULE=$T2/module.xml -TARGET=`pwd`/target/mdbc-jboss-module.tar -JARS=$( mvn dependency:build-classpath | grep -v INFO | tr : '\012' ) - -mkdir -p $T2 -cp $JARS $T2 -cp target/mdbc-0.0.1-SNAPSHOT.jar $T2 -JAR2=$( cd $T2; ls *.jar ) - -cat > $MODULE <<EOF -<?xml version="1.0" encoding="UTF-8"?> -<!-- - Note: This module includes a copy of the H2 database, rather than depending on the - com.h2database.h2 module included with JBoss, because I could not get that copy to work. ---> -<module xmlns="urn:jboss:module:1.1" name="org.onap.music.mdbc"> - <resources> -EOF -for i in $JAR2; do echo " <resource-root path=\"$i\"/>"; done >> $MODULE -cat >> $MODULE <<EOF - </resources> - <dependencies> - <module name="javax.api"/> - <module name="sun.jdk"/> - </dependencies> -</module> -EOF -chmod 444 $T2/*.jar $MODULE - -mkdir -p target -(cd $T; tar cf $TARGET com ) -rm -fr $T diff --git a/src/main/shell/run_h2_server b/src/main/shell/run_h2_server deleted file mode 100755 index 9b14e63..0000000 --- a/src/main/shell/run_h2_server +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# run_h2_server -- This script starts the H2 DB server with all necessary jar files. -# It should be run in the directory containing the MDBC code. -# - -POM=pom-h2server.xml - -if [ ! -f $POM ] -then - echo mk_jboss_module: Where is $POM? - exit 1 -fi - -mvn -f $POM -Dmaven.test.skip=true install -if [ $? != 0 ] -then - echo mk_jboss_module: maven failed... - exit 1 -fi - -export CLASSPATH=$( mvn -f $POM dependency:build-classpath | grep -v INFO ) -echo CLASSPATH is: -echo $CLASSPATH | tr : '\012' - -cd /tmp -java org.h2.tools.Server |
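The deleted run_h2_server script starts a plain H2 TCP server (java org.h2.tools.Server) for the h2server mixin configurations. Before pointing MDBC at such a server, a quick reachability check with the stock H2 driver can help; the URL below mirrors the one in the commented-out ConnectionTest code (jdbc:h2:tcp://localhost:9092/~/test) and is only an example, with empty credentials assumed.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class H2ServerCheck {
    public static void main(String[] args) throws Exception {
        // Adjust the port/path to match however run_h2_server was started.
        String url = "jdbc:h2:tcp://localhost:9092/~/test";
        try (Connection conn = DriverManager.getConnection(url, "", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            if (rs.next()) {
                System.out.println("H2 server is up, SELECT 1 returned " + rs.getInt(1));
            }
        }
    }
}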