-rw-r--r--  cps-application/src/main/resources/application.yml | 5
-rwxr-xr-x  cps-dependencies/pom.xml | 6
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java | 14
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/inventory/sync/ModuleSyncWatchdog.java | 2
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy | 33
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdogSpec.groovy | 4
-rwxr-xr-x  cps-parent/pom.xml | 2
-rw-r--r--  cps-ri/src/main/resources/hibernate.cfg.xml | 2
-rw-r--r--  docs/deployment.rst | 4
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmDataSubscriptionsPerfTest.groovy | 72
-rw-r--r--  integration-test/src/test/resources/hibernate.cfg.xml | 2
11 files changed, 103 insertions(+), 43 deletions(-)
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index 016356801..7beab2e7c 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -37,9 +37,8 @@ spring:
ddl-auto: create
open-in-view: false
properties:
- hibernate:
- enable_lazy_load_no_trans: true
- dialect: org.hibernate.dialect.PostgreSQLDialect
+ hibernate.enable_lazy_load_no_trans: true
+ hibernate.dialect: org.hibernate.dialect.PostgreSQLDialect
datasource:
url: jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb
diff --git a/cps-dependencies/pom.xml b/cps-dependencies/pom.xml
index 16f76b91d..f6931c302 100755
--- a/cps-dependencies/pom.xml
+++ b/cps-dependencies/pom.xml
@@ -78,7 +78,7 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
- <version>3.0.0</version>
+ <version>3.1.2</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@@ -104,7 +104,7 @@
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-boot-starter</artifactId>
- <version>3.0.0</version>
+ <version>3.1.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
@@ -259,7 +259,7 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
- <version>2.14.0</version>
+ <version>2.15.2</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
index 62a380ca5..8b28717db 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
@@ -23,6 +23,7 @@ package org.onap.cps.ncmp.api.impl.config.embeddedcache;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.QueueConfig;
import com.hazelcast.map.IMap;
+import java.util.Set;
import java.util.concurrent.BlockingQueue;
import lombok.extern.slf4j.Slf4j;
import org.onap.cps.cache.HazelcastCacheConfig;
@@ -44,6 +45,8 @@ public class SynchronizationCacheConfig extends HazelcastCacheConfig {
private static final MapConfig moduleSyncStartedConfig = createMapConfig("moduleSyncStartedConfig");
private static final MapConfig dataSyncSemaphoresConfig = createMapConfig("dataSyncSemaphoresConfig");
+ private static final MapConfig moduleSetTagCacheMapConfig = createMapConfig("moduleSetTagCacheMapConfig");
+
/**
* Module Sync Distributed Queue Instance.
*
@@ -74,4 +77,15 @@ public class SynchronizationCacheConfig extends HazelcastCacheConfig {
public IMap<String, Boolean> dataSyncSemaphores() {
return createHazelcastInstance("dataSyncSemaphores", dataSyncSemaphoresConfig).getMap("dataSyncSemaphores");
}
+
+ /**
+ * IMap instance for cached ModulesSetTags.
+ *
+ * @return configured map of ModuleSetTags
+ */
+ @Bean
+ public IMap<String, Set<String>> moduleSetTagCache() {
+ return createHazelcastInstance("moduleSetTags", moduleSetTagCacheMapConfig)
+ .getMap("moduleSetTagCache");
+ }
}
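
The new bean is an ordinary Hazelcast IMap keyed by module set tag, so callers use the standard map API, including per-entry time-to-live, which the spec below exercises. A minimal sketch of injecting and using the bean from a Spring component; the consumer class and its method names are hypothetical, only the bean's type and name come from this change:

    import com.hazelcast.map.IMap;

    import java.util.Set;
    import java.util.concurrent.TimeUnit;

    import org.springframework.stereotype.Component;

    // Hypothetical consumer, not part of the commit.
    @Component
    public class ModuleSetTagCacheClient {

        private final IMap<String, Set<String>> moduleSetTagCache;

        // Spring injects the IMap<String, Set<String>> bean declared in SynchronizationCacheConfig.
        public ModuleSetTagCacheClient(final IMap<String, Set<String>> moduleSetTagCache) {
            this.moduleSetTagCache = moduleSetTagCache;
        }

        // Cache the module names resolved for a module set tag; the 1-hour TTL is an assumption.
        public void cacheModuleNames(final String moduleSetTag, final Set<String> moduleNames) {
            moduleSetTagCache.put(moduleSetTag, moduleNames, 1, TimeUnit.HOURS);
        }

        // Return the cached module names, or an empty set when the tag is unknown or has expired.
        public Set<String> getCachedModuleNames(final String moduleSetTag) {
            return moduleSetTagCache.getOrDefault(moduleSetTag, Set.of());
        }
    }
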
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/inventory/sync/ModuleSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/inventory/sync/ModuleSyncWatchdog.java
index 916fafd30..6ba52ee16 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/inventory/sync/ModuleSyncWatchdog.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/inventory/sync/ModuleSyncWatchdog.java
@@ -25,6 +25,7 @@ import com.hazelcast.map.IMap;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@@ -48,6 +49,7 @@ public class ModuleSyncWatchdog {
private final IMap<String, Object> moduleSyncStartedOnCmHandles;
private final ModuleSyncTasks moduleSyncTasks;
private final AsyncTaskExecutor asyncTaskExecutor;
+ private final IMap<String, Set<String>> moduleSetTagCache;
private static final int MODULE_SYNC_BATCH_SIZE = 100;
private static final long PREVENT_CPU_BURN_WAIT_TIME_MILLIS = 10;
private static final String VALUE_FOR_HAZELCAST_IN_PROGRESS_MAP = "Started";
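
This hunk only adds the cache as a new final collaborator on ModuleSyncWatchdog; nothing in the diff shows it being consulted yet. Assuming the class derives its constructor from its final fields (for example via Lombok's @RequiredArgsConstructor), the new field also becomes a new constructor argument, which is why ModuleSyncWatchdogSpec further below has to pass an extra moduleSetTagCache when constructing the object under test. A minimal sketch of that wiring pattern, under that assumption:

    import com.hazelcast.map.IMap;

    import java.util.Set;

    import lombok.RequiredArgsConstructor;

    // Sketch only: a final field added to a @RequiredArgsConstructor class automatically
    // extends the generated constructor that callers and tests must satisfy.
    @RequiredArgsConstructor
    public class WatchdogWiringSketch {
        private final IMap<String, Set<String>> moduleSetTagCache;
        // Generated constructor: WatchdogWiringSketch(IMap<String, Set<String>> moduleSetTagCache)
    }
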
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
index c0fc18abf..2fa960692 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
@@ -44,6 +44,9 @@ class SynchronizationCacheConfigSpec extends Specification {
@Autowired
private IMap<String, Boolean> dataSyncSemaphores
+ @Autowired
+ private IMap<String, Set<String>> moduleSetTagCache
+
def 'Embedded (hazelcast) Caches for Module and Data Sync.'() {
expect: 'system is able to create an instance of the Module Sync Work Queue'
assert null != moduleSyncWorkQueue
@@ -51,10 +54,12 @@ class SynchronizationCacheConfigSpec extends Specification {
assert null != moduleSyncStartedOnCmHandles
and: 'system is able to create an instance of a map to hold data sync semaphores'
assert null != dataSyncSemaphores
- and: 'there are at least 3 instances'
- assert Hazelcast.allHazelcastInstances.size() > 2
+ and: 'system is able to create an instance of a map to hold module set tags'
+ assert null != moduleSetTagCache
+ and: 'there are at least 4 instances'
+ assert Hazelcast.allHazelcastInstances.size() > 3
and: 'they have the correct names (in any order)'
- assert Hazelcast.allHazelcastInstances.name.containsAll('moduleSyncWorkQueue', 'moduleSyncStartedOnCmHandles', 'dataSyncSemaphores' )
+ assert Hazelcast.allHazelcastInstances.name.containsAll('moduleSyncWorkQueue', 'moduleSyncStartedOnCmHandles', 'dataSyncSemaphores', 'moduleSetTags')
}
def 'Verify configs for Distributed objects'(){
@@ -67,6 +72,9 @@ class SynchronizationCacheConfigSpec extends Specification {
and: 'the Data Sync Semaphores Map config'
def dataSyncSemaphoresConfig = Hazelcast.getHazelcastInstanceByName('dataSyncSemaphores').config
def dataSyncSemaphoresMapConfig = dataSyncSemaphoresConfig.mapConfigs.get('dataSyncSemaphoresConfig')
+ and: 'the Module Set Tag Map config'
+ def moduleSetTagCacheConfig = Hazelcast.getHazelcastInstanceByName('moduleSetTags').config
+ def moduleSetTagMapConfig = moduleSetTagCacheConfig.mapConfigs.get('moduleSetTagCacheMapConfig')
expect: 'system created instance with correct config of Module Sync Work Queue'
assert moduleSyncDefaultWorkQueueConfig.backupCount == 3
assert moduleSyncDefaultWorkQueueConfig.asyncBackupCount == 3
@@ -76,11 +84,15 @@ class SynchronizationCacheConfigSpec extends Specification {
and: 'Data Sync Semaphore Map has the correct settings'
assert dataSyncSemaphoresMapConfig.backupCount == 3
assert dataSyncSemaphoresMapConfig.asyncBackupCount == 3
+ and: 'Module Set Tag Map has the correct settings'
+ assert moduleSetTagMapConfig.backupCount == 3
+ assert moduleSetTagMapConfig.asyncBackupCount == 3
and: 'all instances are part of same cluster'
def testClusterName = 'cps-and-ncmp-test-caches'
assert moduleSyncWorkQueueConfig.clusterName == testClusterName
assert moduleSyncStartedOnCmHandlesConfig.clusterName == testClusterName
assert dataSyncSemaphoresConfig.clusterName == testClusterName
+ assert moduleSetTagCacheConfig.clusterName == testClusterName
}
def 'Verify deployment network configs for Distributed objects'() {
@@ -90,6 +102,8 @@ class SynchronizationCacheConfigSpec extends Specification {
def moduleSyncStartedOnCmHandlesNetworkConfig = Hazelcast.getHazelcastInstanceByName('moduleSyncStartedOnCmHandles').config.networkConfig
and: 'the Data Sync Semaphores Map config'
def dataSyncSemaphoresNetworkConfig = Hazelcast.getHazelcastInstanceByName('dataSyncSemaphores').config.networkConfig
+ and: 'the Module Set Tag Map config'
+ def moduleSetTagNetworkConfig = Hazelcast.getHazelcastInstanceByName('moduleSetTags').config.networkConfig
expect: 'system created instance with correct config of Module Sync Work Queue'
assert queueNetworkConfig.join.autoDetectionConfig.enabled
assert !queueNetworkConfig.join.kubernetesConfig.enabled
@@ -99,7 +113,9 @@ class SynchronizationCacheConfigSpec extends Specification {
and: 'Data Sync Semaphore Map has the correct settings'
assert dataSyncSemaphoresNetworkConfig.join.autoDetectionConfig.enabled
assert !dataSyncSemaphoresNetworkConfig.join.kubernetesConfig.enabled
-
+ and: 'Module Set Tag Map has the correct settings'
+ assert moduleSetTagNetworkConfig.join.autoDetectionConfig.enabled
+ assert !moduleSetTagNetworkConfig.join.kubernetesConfig.enabled
}
def 'Verify network config'() {
@@ -135,6 +151,15 @@ class SynchronizationCacheConfigSpec extends Specification {
waitMax2SecondsForKeyExpiration(dataSyncSemaphores, 'testKeyDataSync')
}
+ def 'Time to Live Verify for Module Set Tag'() {
+ when: 'the key is inserted with a TTL of 1 second'
+ moduleSetTagCache.put('testKeyModuleSetTag', ['module-set-tag'] as Set, 1, TimeUnit.SECONDS)
+ then: 'the entry is present in the map'
+ assert moduleSetTagCache.get('testKeyModuleSetTag') != null
+ and: 'the entry expires in less than 2 seconds'
+ waitMax2SecondsForKeyExpiration(moduleSetTagCache, 'testKeyModuleSetTag')
+ }
+
def waitMax2SecondsForKeyExpiration(map, key) {
def count = 0
while ( map.get(key)!=null && ++count <= 20 ) {
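
The new 'Time to Live Verify for Module Set Tag' test follows the same pattern as the existing TTL tests: put an entry with a 1-second TTL, assert it is present, then poll until it expires. A standalone sketch of that behaviour against a locally started Hazelcast instance (the spec uses the Spring-managed map instead, and the 100 ms polling interval is assumed from the helper's 20-iteration / 2-second bound):

    import com.hazelcast.core.Hazelcast;
    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.map.IMap;

    import java.util.Set;
    import java.util.concurrent.TimeUnit;

    public class TtlExpiryExample {

        public static void main(final String[] args) throws InterruptedException {
            final HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance();
            final IMap<String, Set<String>> moduleSetTagCache = hazelcastInstance.getMap("moduleSetTagCache");

            // Insert an entry with a 1-second time-to-live.
            moduleSetTagCache.put("testKeyModuleSetTag", Set.of("module-set-tag"), 1, TimeUnit.SECONDS);
            System.out.println("present after put: " + (moduleSetTagCache.get("testKeyModuleSetTag") != null));

            // Poll for up to ~2 seconds until the entry has expired, as waitMax2SecondsForKeyExpiration does.
            int attempts = 0;
            while (moduleSetTagCache.get("testKeyModuleSetTag") != null && ++attempts <= 20) {
                TimeUnit.MILLISECONDS.sleep(100);
            }
            System.out.println("expired: " + (moduleSetTagCache.get("testKeyModuleSetTag") == null));

            hazelcastInstance.shutdown();
        }
    }
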
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdogSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdogSpec.groovy
index 94ee6eae8..d85686aa3 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdogSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdogSpec.groovy
@@ -45,7 +45,9 @@ class ModuleSyncWatchdogSpec extends Specification {
def spiedAsyncTaskExecutor = Spy(AsyncTaskExecutor)
- def objectUnderTest = new ModuleSyncWatchdog(mockSyncUtils, moduleSyncWorkQueue , mockModuleSyncStartedOnCmHandles, mockModuleSyncTasks, spiedAsyncTaskExecutor)
+ def moduleSetTagCache = Mock(IMap<String, Set<String>>)
+
+ def objectUnderTest = new ModuleSyncWatchdog(mockSyncUtils, moduleSyncWorkQueue , mockModuleSyncStartedOnCmHandles, mockModuleSyncTasks, spiedAsyncTaskExecutor, moduleSetTagCache)
void setup() {
spiedAsyncTaskExecutor.setupThreadPool()
diff --git a/cps-parent/pom.xml b/cps-parent/pom.xml
index 248bc28cb..41215567c 100755
--- a/cps-parent/pom.xml
+++ b/cps-parent/pom.xml
@@ -118,7 +118,7 @@
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
- <version>3.0.0</version>
+ <version>3.1.2</version>
<executions>
<execution>
<goals>
diff --git a/cps-ri/src/main/resources/hibernate.cfg.xml b/cps-ri/src/main/resources/hibernate.cfg.xml
index 98e6cfc5b..1b822b9de 100644
--- a/cps-ri/src/main/resources/hibernate.cfg.xml
+++ b/cps-ri/src/main/resources/hibernate.cfg.xml
@@ -9,7 +9,7 @@
<property name="hibernate.connection.url">jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb</property>
<property name="hibernate.connection.username">${DB_USERNAME}</property>
<property name="hibernate.connection.password">${DB_PASSWORD}</property>
- <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
+ <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="show_sql">true</property>
<property name="hibernate.hbm2ddl.auto">update</property>
</session-factory>
diff --git a/docs/deployment.rst b/docs/deployment.rst
index acc32e364..0642e6a8e 100644
--- a/docs/deployment.rst
+++ b/docs/deployment.rst
@@ -336,5 +336,7 @@ Below are the list of distributed datastructures that we have.
+--------------+---------------------------------+----------------------------------------------------------+
| cps-ncmp | trustLevelPerDmiPlugin | Stores the TrustLevel for the dmi-plugins. |
+--------------+---------------------------------+----------------------------------------------------------+
+| cps-ncmp | moduleSetTagCacheMapConfig | Stores the Module Set Tags for cmHandles. |
++--------------+---------------------------------+----------------------------------------------------------+
-Total number of caches : 7
\ No newline at end of file
+Total number of caches : 8
\ No newline at end of file
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmDataSubscriptionsPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmDataSubscriptionsPerfTest.groovy
index 7e7dedfc3..cf5c3f689 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmDataSubscriptionsPerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmDataSubscriptionsPerfTest.groovy
@@ -55,47 +55,63 @@ class CmDataSubscriptionsPerfTest extends NcmpPerfTestBase {
recordAndAssertPerformance("Query all subscribers", 1_000, durationInMillis)
}
- def 'Worst case new subscription (200x10 new entries).'() {
- given: 'a new subscription with non-matching data'
- def subscribers = createLeafList('subscribers',1, subscriberIdPrefix)
- def filters = '"filters":' + createJsonArray('filter',numberOfFiltersPerCmHandle,'xpath','other_' + xpathPrefix,subscribers)
- def cmHandles = createJsonArray('cm-handle',numberOfCmHandlesPerCmDataSubscription,'id','other' + cmHandlePrefix, filters)
- when: 'Insert a new subscription'
- stopWatch.start()
- cpsDataService.saveData(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, xPathForDataStore1CmHandles, cmHandles, now)
- stopWatch.stop()
- def durationInMillis = stopWatch.getTotalTimeMillis()
- then: 'insert new subscription with 1 second'
- recordAndAssertPerformance("Insert new subscription", 1_000, durationInMillis)
- }
-
def 'Worst case subscription update (200x10 matching entries).'() {
given: 'all filters are queried'
def cpsPath = '//filter'
def result = objectUnderTest.queryDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, cpsPath, INCLUDE_ALL_DESCENDANTS)
+ and: 'there are the expected number of subscribers per subscription'
+ assert result.collect {it.leaves.subscribers.size()}.sum() == totalNumberOfEntries * numberOfCmDataSubscribers
and: 'find all entries for an existing subscriptions'
def matches = querySubscriptionsByIteration(result, 1)
- when: 'Update all subscriptions found'
+ when: 'update all subscriptions found'
stopWatch.start()
- /* the production code version of this should manipulate the original subscribersAsArray of course
- but for the (performance) poc creating another array with one extra element suffices
- */
- def jsonPerPath = [:]
- matches.each { xpath, subscribersAsArray ->
+ HashMap<String, List<String>> filterEntriesPerPath = [:]
+ matches.each { dataNode, subscribersAsArray ->
def updatedSubscribers = createLeafList('subscribers', 1 + numberOfCmDataSubscribers, subscriberIdPrefix)
- def filterEntry = '{"filter": {"xpath":"' + xpath + '", ' + updatedSubscribers + ' } }'
- def parentPath = xpath.toString().substring(0, xpath.toString().indexOf('/filter[@xpath='))
- jsonPerPath.put(parentPath, filterEntry)
+ def filterEntry = '{"xpath":"' + dataNode.leaves.xpath + '", ' + updatedSubscribers + ' }'
+ def parentPath = dataNode.xpath.toString().substring(0, dataNode.xpath.toString().indexOf('/filter[@xpath='))
+ filterEntriesPerPath.putIfAbsent(parentPath, new ArrayList<String>())
+ filterEntriesPerPath.get(parentPath).add(filterEntry)
+ }
+ HashMap<String, String> jsonPerPath = [:]
+ filterEntriesPerPath.each { parentPath, filterEntries ->
+ jsonPerPath.put(parentPath, '{"filter": [' + filterEntries.join(',') + ']}')
}
- cpsDataService.updateDataNodesAndDescendants(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, jsonPerPath, now)
+
+ // NOTE Below fails as updateDataNodesAndDescendants can't handle JSON lists!
+ // cpsDataService.updateDataNodesAndDescendants(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, jsonPerPath, now)
+
+ // So update for each CM-handle instead:
+ jsonPerPath.each { parentPath, json ->
+ // Around 8.5 seconds for long strings, 4.8 with short strings
+ // cpsDataService.updateDataNodeAndDescendants(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, parentPath, json, now)
+ // Around 6.5 seconds for long strings, 3.3 seconds with short strings
+ cpsDataService.updateNodeLeaves(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, parentPath, json, now)
+ }
+
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- then: 'Update matching subscription within 8 seconds'
- //TODO Toine check with Daniel if this can be optimized quickly without really changing production code
- // ie is there a better way of doing these 2,000 updates
+ then: 'a subscriber has been added to each filter entry'
+ def resultAfter = objectUnderTest.queryDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, cpsPath, INCLUDE_ALL_DESCENDANTS)
+ assert resultAfter.collect {it.leaves.subscribers.size()}.sum() == totalNumberOfEntries * (1 + numberOfCmDataSubscribers)
+ and: 'update matching subscription within 8 seconds'
recordAndAssertPerformance("Update matching subscription", 8_000, durationInMillis)
}
+ def 'Worst case new subscription (200x10 new entries).'() {
+ given: 'a new subscription with non-matching data'
+ def subscribers = createLeafList('subscribers',1, subscriberIdPrefix)
+ def filters = '"filters":' + createJsonArray('filter',numberOfFiltersPerCmHandle,'xpath','other_' + xpathPrefix,subscribers)
+ def cmHandles = createJsonArray('cm-handle',numberOfCmHandlesPerCmDataSubscription,'id','other' + cmHandlePrefix, filters)
+ when: 'Insert a new subscription'
+ stopWatch.start()
+ cpsDataService.saveData(NCMP_PERFORMANCE_TEST_DATASPACE, CM_DATA_SUBSCRIPTIONS_ANCHOR, xPathForDataStore1CmHandles, cmHandles, now)
+ stopWatch.stop()
+ def durationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'insert new subscription with 1 second'
+ recordAndAssertPerformance("Insert new subscription", 1_000, durationInMillis)
+ }
+
def querySubscriptionsByIteration(Collection<DataNode> allSubscriptionsAsDataNodes, targetSubscriptionSequenceNumber) {
def matches = [:]
allSubscriptionsAsDataNodes.each {
@@ -104,7 +120,7 @@ class CmDataSubscriptionsPerfTest extends NcmpPerfTestBase {
def targetSubscriptionId = subscriberIdPrefix + '-' + ( targetSubscriptionSequenceNumber > 0 ? targetSubscriptionSequenceNumber
: 1 + random.nextInt(numberOfCmDataSubscribers) )
if (subscribersAsSet.contains(targetSubscriptionId)) {
- matches.put(it.xpath, subscribersAsArray)
+ matches.put(it, subscribersAsArray)
}
}
return matches
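
The heart of the reworked test is the grouping step: filter entries are bucketed per CM-handle parent xpath and joined into a single JSON "filter" list per parent, so each parent path needs only one updateNodeLeaves call instead of one call per filter. A plain-Java rendering of that grouping, for illustration only (the parent paths and payloads are made up; in the test they come from the queried data nodes):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class FilterGroupingExample {

        public static void main(final String[] args) {
            // Two hard-coded entries stand in for the filters found on the queried data nodes.
            final Map<String, List<String>> filterEntriesPerPath = new HashMap<>();
            addFilterEntry(filterEntriesPerPath, "/cm-handles/cm-handle[@id='ch-1']",
                    "{\"xpath\":\"/parent/child-1\", \"subscribers\":[\"subscriber-1\",\"subscriber-2\"]}");
            addFilterEntry(filterEntriesPerPath, "/cm-handles/cm-handle[@id='ch-1']",
                    "{\"xpath\":\"/parent/child-2\", \"subscribers\":[\"subscriber-1\",\"subscriber-2\"]}");

            // Join all filter entries under the same parent into a single JSON "filter" list,
            // so only one update call per parent path is needed afterwards.
            final Map<String, String> jsonPerPath = new HashMap<>();
            filterEntriesPerPath.forEach((parentPath, filterEntries) ->
                    jsonPerPath.put(parentPath, "{\"filter\": [" + String.join(",", filterEntries) + "]}"));

            jsonPerPath.forEach((parentPath, json) -> System.out.println(parentPath + " -> " + json));
        }

        private static void addFilterEntry(final Map<String, List<String>> filterEntriesPerPath,
                                           final String parentPath, final String filterEntry) {
            filterEntriesPerPath.computeIfAbsent(parentPath, key -> new ArrayList<>()).add(filterEntry);
        }
    }
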
diff --git a/integration-test/src/test/resources/hibernate.cfg.xml b/integration-test/src/test/resources/hibernate.cfg.xml
index 513c00ad2..8d5139b60 100644
--- a/integration-test/src/test/resources/hibernate.cfg.xml
+++ b/integration-test/src/test/resources/hibernate.cfg.xml
@@ -9,7 +9,7 @@
<property name="hibernate.connection.url">${DB_URL}</property>
<property name="hibernate.connection.username">${DB_USERNAME}</property>
<property name="hibernate.connection.password">${DB_PASSWORD}</property>
- <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
+ <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="show_sql">true</property>
<property name="hibernate.hbm2ddl.auto">none</property>
</session-factory>