Diffstat (limited to 'cps-ri/src')
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java                 9
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java                 41
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java                    10
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java                3
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java           65
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java                              165
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java                          33
-rw-r--r--  cps-ri/src/main/resources/hibernate.cfg.xml                                                   16
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy            31
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy       42
-rwxr-xr-x  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy  61
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy            116
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy            69
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy                       99
-rw-r--r--  cps-ri/src/test/resources/data/cps-path-query.sql                                             38
-rwxr-xr-x  cps-ri/src/test/resources/data/fragment.sql                                                   43
-rw-r--r--  cps-ri/src/test/resources/hibernate.cfg.xml                                                   16
17 files changed, 717 insertions, 140 deletions
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java
index 50b27207ee..2e7bb7e969 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java
@@ -24,6 +24,7 @@ package org.onap.cps.spi.impl;
import java.util.Collection;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
import javax.transaction.Transactional;
import lombok.AllArgsConstructor;
@@ -36,8 +37,10 @@ import org.onap.cps.spi.exceptions.AlreadyDefinedException;
import org.onap.cps.spi.exceptions.DataspaceInUseException;
import org.onap.cps.spi.exceptions.ModuleNamesNotFoundException;
import org.onap.cps.spi.model.Anchor;
+import org.onap.cps.spi.model.CmHandleQueryParameters;
import org.onap.cps.spi.repository.AnchorRepository;
import org.onap.cps.spi.repository.DataspaceRepository;
+import org.onap.cps.spi.repository.ModuleReferenceRepository;
import org.onap.cps.spi.repository.SchemaSetRepository;
import org.onap.cps.spi.repository.YangResourceRepository;
import org.springframework.dao.DataIntegrityViolationException;
@@ -51,6 +54,7 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic
private final AnchorRepository anchorRepository;
private final SchemaSetRepository schemaSetRepository;
private final YangResourceRepository yangResourceRepository;
+ private final ModuleReferenceRepository moduleReferenceRepository;
@Override
public void createDataspace(final String dataspaceName) {
@@ -132,6 +136,11 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic
anchorRepository.delete(anchorEntity);
}
+ @Override
+ public Set<String> queryCmHandles(final CmHandleQueryParameters cmHandleQueryParameters) {
+ return moduleReferenceRepository.queryCmHandles(cmHandleQueryParameters);
+ }
+
private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) {
final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName);
return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName);
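The new queryCmHandles method is a plain delegation to ModuleReferenceRepository. A minimal sketch of the call from an admin-service consumer (the injected service instance is assumed; the property values are borrowed from the test data further down):

    final CmHandleQueryParameters cmHandleQueryParameters = new CmHandleQueryParameters();
    cmHandleQueryParameters.setPublicProperties(
            Map.of("Contact", "newemailforstore@bookstore.com"));   // match on one public property
    final Set<String> cmHandleIds =
            cpsAdminPersistenceService.queryCmHandles(cmHandleQueryParameters);
    // with the fragment.sql test data this would return e.g. PNFDemo, PNFDemo2 and PNFDemo4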
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index 78862d7233..daf4dd757b 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -41,6 +41,8 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.hibernate.StaleStateException;
import org.onap.cps.cpspath.parser.CpsPathQuery;
+import org.onap.cps.cpspath.parser.CpsPathUtil;
+import org.onap.cps.cpspath.parser.PathParsingException;
import org.onap.cps.spi.CpsDataPersistenceService;
import org.onap.cps.spi.FetchDescendantsOption;
import org.onap.cps.spi.entities.AnchorEntity;
@@ -56,6 +58,7 @@ import org.onap.cps.spi.model.DataNodeBuilder;
import org.onap.cps.spi.repository.AnchorRepository;
import org.onap.cps.spi.repository.DataspaceRepository;
import org.onap.cps.spi.repository.FragmentRepository;
+import org.onap.cps.spi.utils.SessionManager;
import org.onap.cps.utils.JsonObjectMapper;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.stereotype.Service;
@@ -73,6 +76,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
private final JsonObjectMapper jsonObjectMapper;
+ private final SessionManager sessionManager;
+
private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@[\\s\\S]+?]){0,1})";
private static final Pattern REG_EX_PATTERN_FOR_LIST_ELEMENT_KEY_PREDICATE =
Pattern.compile("\\[(\\@([^\\/]{0,9999}))\\]$");
@@ -171,8 +176,14 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
if (isRootXpath(xpath)) {
return fragmentRepository.findFirstRootByDataspaceAndAnchor(dataspaceEntity, anchorEntity);
} else {
+ final String normalizedXpath;
+ try {
+ normalizedXpath = CpsPathUtil.getNormalizedXpath(xpath);
+ } catch (final PathParsingException e) {
+ throw new CpsPathException(e.getMessage());
+ }
return fragmentRepository.getByDataspaceAndAnchorAndXpath(dataspaceEntity, anchorEntity,
- xpath);
+ normalizedXpath);
}
}
@@ -183,8 +194,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName);
final CpsPathQuery cpsPathQuery;
try {
- cpsPathQuery = CpsPathQuery.createFrom(cpsPath);
- } catch (final IllegalStateException e) {
+ cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath);
+ } catch (final PathParsingException e) {
throw new CpsPathException(e.getMessage());
}
List<FragmentEntity> fragmentEntities =
@@ -199,6 +210,22 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
.collect(Collectors.toUnmodifiableList());
}
+ @Override
+ public String startSession() {
+ return sessionManager.startSession();
+ }
+
+ @Override
+ public void closeSession(final String sessionId) {
+ sessionManager.closeSession(sessionId);
+ }
+
+ @Override
+ public void lockAnchor(final String sessionId, final String dataspaceName,
+ final String anchorName, final Long timeoutInMilliseconds) {
+ sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, timeoutInMilliseconds);
+ }
+
private static Set<String> processAncestorXpath(final List<FragmentEntity> fragmentEntities,
final CpsPathQuery cpsPathQuery) {
final Set<String> ancestorXpath = new HashSet<>();
@@ -365,12 +392,13 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
private boolean deleteDataNode(final FragmentEntity parentFragmentEntity, final String targetXpath) {
- if (parentFragmentEntity.getXpath().equals(targetXpath)) {
+ final String normalizedTargetXpath = CpsPathUtil.getNormalizedXpath(targetXpath);
+ if (parentFragmentEntity.getXpath().equals(normalizedTargetXpath)) {
fragmentRepository.delete(parentFragmentEntity);
return true;
}
if (parentFragmentEntity.getChildFragments()
- .removeIf(fragment -> fragment.getXpath().equals(targetXpath))) {
+ .removeIf(fragment -> fragment.getXpath().equals(normalizedTargetXpath))) {
fragmentRepository.save(parentFragmentEntity);
return true;
}
@@ -378,7 +406,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
private boolean deleteAllListElements(final FragmentEntity parentFragmentEntity, final String listXpath) {
- final String deleteTargetXpathPrefix = listXpath + "[";
+ final String normalizedListXpath = CpsPathUtil.getNormalizedXpath(listXpath);
+ final String deleteTargetXpathPrefix = normalizedListXpath + "[";
if (parentFragmentEntity.getChildFragments()
.removeIf(fragment -> fragment.getXpath().startsWith(deleteTargetXpathPrefix))) {
fragmentRepository.save(parentFragmentEntity);
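The three new public methods are thin delegations to the new SessionManager introduced later in this change. A sketch of the intended call pattern from a client's point of view; the dataspace name, anchor name and timeout are illustrative values only:

    final String sessionId = cpsDataPersistenceService.startSession();
    try {
        // acquire a pessimistic write lock on the anchor, waiting at most 200 ms
        cpsDataPersistenceService.lockAnchor(sessionId, "my-dataspace", "my-anchor", 200L);
        // ... perform the batched reads/writes that need the lock ...
    } finally {
        // closing the session commits the transaction and releases any locks it holds
        cpsDataPersistenceService.closeSession(sessionId);
    }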
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java
index 6551937e10..4bc9dd9603 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java
@@ -21,6 +21,8 @@
package org.onap.cps.spi.repository;
import java.util.Collection;
+import java.util.Set;
+import org.onap.cps.spi.model.CmHandleQueryParameters;
import org.onap.cps.spi.model.ModuleReference;
/**
@@ -31,4 +33,12 @@ public interface ModuleReferenceQuery {
Collection<ModuleReference> identifyNewModuleReferences(
final Collection<ModuleReference> moduleReferencesToCheck);
+ /**
+ * Query and return cm handles that match the given query parameters.
+ *
+ * @param cmHandleQueryParameters the cm handle query parameters
+ * @return collection of cm handle ids
+ */
+ Set<String> queryCmHandles(CmHandleQueryParameters cmHandleQueryParameters);
+
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java
index ce2bfe7847..f70e218373 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java
@@ -27,8 +27,7 @@ import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
-public interface ModuleReferenceRepository extends
- JpaRepository<YangResourceEntity, Long>, ModuleReferenceQuery {
+public interface ModuleReferenceRepository extends JpaRepository<YangResourceEntity, Long>, ModuleReferenceQuery {
Collection<ModuleReference> identifyNewModuleReferences(
final Collection<ModuleReference> moduleReferencesToCheck);
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java
index 0e79deb8e8..f85dea3a73 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java
@@ -24,21 +24,32 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
import java.util.UUID;
+import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
+import lombok.AllArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.spi.CpsDataPersistenceService;
+import org.onap.cps.spi.FetchDescendantsOption;
+import org.onap.cps.spi.model.CmHandleQueryParameters;
+import org.onap.cps.spi.model.DataNode;
import org.onap.cps.spi.model.ModuleReference;
import org.springframework.transaction.annotation.Transactional;
@Slf4j
@Transactional
+@AllArgsConstructor
public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery {
@PersistenceContext
private EntityManager entityManager;
+ private final CpsDataPersistenceService cpsDataPersistenceService;
+
@Override
@SneakyThrows
public Collection<ModuleReference> identifyNewModuleReferences(
@@ -57,6 +68,56 @@ public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery {
return identifyNewModuleReferencesForCmHandle(tempTableName);
}
+ /**
+ * Query and return cm handles that match the given query parameters.
+ *
+ * @param cmHandleQueryParameters the cm handle query parameters
+ * @return collection of cm handle ids
+ */
+ @Override
+ public Set<String> queryCmHandles(final CmHandleQueryParameters cmHandleQueryParameters) {
+
+ if (cmHandleQueryParameters.getPublicProperties().entrySet().isEmpty()) {
+ return getAllCmHandles();
+ }
+
+ final Collection<DataNode> amalgamatedQueryResult = new ArrayList<>();
+ int queryConditionCounter = 0;
+ for (final Map.Entry<String, String> entry : cmHandleQueryParameters.getPublicProperties().entrySet()) {
+ final StringBuilder cmHandlePath = new StringBuilder();
+ cmHandlePath.append("//public-properties[@name='").append(entry.getKey()).append("' ");
+ cmHandlePath.append("and @value='").append(entry.getValue()).append("']");
+ cmHandlePath.append("/ancestor::cm-handles");
+
+ final Collection<DataNode> singleConditionQueryResult =
+ cpsDataPersistenceService.queryDataNodes("NCMP-Admin",
+ "ncmp-dmi-registry", String.valueOf(cmHandlePath), FetchDescendantsOption.OMIT_DESCENDANTS);
+ if (++queryConditionCounter == 1) {
+ amalgamatedQueryResult.addAll(singleConditionQueryResult);
+ } else {
+ amalgamatedQueryResult.retainAll(singleConditionQueryResult);
+ }
+
+ if (amalgamatedQueryResult.isEmpty()) {
+ break;
+ }
+ }
+
+ return extractCmHandleIds(amalgamatedQueryResult);
+ }
+
+ private Set<String> getAllCmHandles() {
+ final Collection<DataNode> cmHandles = cpsDataPersistenceService.queryDataNodes("NCMP-Admin",
+ "ncmp-dmi-registry", "//public-properties/ancestor::cm-handles",
+ FetchDescendantsOption.OMIT_DESCENDANTS);
+ return extractCmHandleIds(cmHandles);
+ }
+
+ private Set<String> extractCmHandleIds(final Collection<DataNode> cmHandles) {
+ return cmHandles.stream().map(cmHandle -> cmHandle.getLeaves().get("id").toString())
+ .collect(Collectors.toSet());
+ }
+
private void createTemporaryTable(final String tempTableName) {
final StringBuilder sqlStringBuilder = new StringBuilder("CREATE TEMPORARY TABLE " + tempTableName + "(");
sqlStringBuilder.append(" id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,");
@@ -94,8 +155,8 @@ public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery {
+ " AND yang_resource.revision=%1$s.revision"
+ " WHERE yang_resource.module_name IS NULL;", tempTableName);
- final List<Object[]> resultsAsObjects =
- entityManager.createNativeQuery(sql).getResultList();
+ @SuppressWarnings("unchecked")
+ final List<Object[]> resultsAsObjects = entityManager.createNativeQuery(sql).getResultList();
final List<ModuleReference> resultsAsModuleReferences = new ArrayList<>(resultsAsObjects.size());
for (final Object[] row : resultsAsObjects) {
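For each entry of the public-properties map the loop above assembles a CPS path and runs it through queryDataNodes against the NCMP-Admin dataspace; the per-property results are then intersected (retainAll), so only cm handles matching every supplied property survive. The shape of the generated path for one illustrative map entry:

    // cmHandlePath built for the entry 'Contact' -> 'newemailforstore@bookstore.com'
    final String cmHandlePath =
            "//public-properties[@name='Contact' and @value='newemailforstore@bookstore.com']"
                    + "/ancestor::cm-handles";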
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java b/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java
new file mode 100644
index 0000000000..e2786887ac
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java
@@ -0,0 +1,165 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils;
+
+import com.google.common.util.concurrent.TimeLimiter;
+import com.google.common.util.concurrent.UncheckedExecutionException;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.Configuration;
+import org.onap.cps.spi.entities.AnchorEntity;
+import org.onap.cps.spi.entities.DataspaceEntity;
+import org.onap.cps.spi.entities.SchemaSetEntity;
+import org.onap.cps.spi.entities.YangResourceEntity;
+import org.onap.cps.spi.exceptions.SessionManagerException;
+import org.onap.cps.spi.exceptions.SessionTimeoutException;
+import org.onap.cps.spi.repository.AnchorRepository;
+import org.onap.cps.spi.repository.DataspaceRepository;
+import org.springframework.stereotype.Component;
+
+@RequiredArgsConstructor
+@Slf4j
+@Component
+public class SessionManager {
+
+ private final TimeLimiterProvider timeLimiterProvider;
+ private final DataspaceRepository dataspaceRepository;
+ private final AnchorRepository anchorRepository;
+ private static SessionFactory sessionFactory;
+ private static ConcurrentHashMap<String, Session> sessionMap = new ConcurrentHashMap<>();
+
+ private synchronized void buildSessionFactory() {
+ if (sessionFactory == null) {
+ sessionFactory = new Configuration().configure("hibernate.cfg.xml")
+ .addAnnotatedClass(AnchorEntity.class)
+ .addAnnotatedClass(DataspaceEntity.class)
+ .addAnnotatedClass(SchemaSetEntity.class)
+ .addAnnotatedClass(YangResourceEntity.class)
+ .buildSessionFactory();
+ }
+ }
+
+ /**
+ * Starts a session which allows use of locks and batch interaction with the persistence service.
+ *
+ * @return Session ID string
+ */
+ public String startSession() {
+ buildSessionFactory();
+ final Session session = sessionFactory.openSession();
+ final String sessionId = UUID.randomUUID().toString();
+ sessionMap.put(sessionId, session);
+ session.beginTransaction();
+ return sessionId;
+ }
+
+ /**
+ * Close session.
+ * Locks will be released and changes will be committed.
+ *
+ * @param sessionId session ID
+ */
+ public void closeSession(final String sessionId) {
+ try {
+ final Session session = getSession(sessionId);
+ session.getTransaction().commit();
+ session.close();
+ } catch (final HibernateException e) {
+ throw new SessionManagerException("Cannot close session",
+ String.format("Unable to close session with session ID '%s'", sessionId), e);
+ } finally {
+ sessionMap.remove(sessionId);
+ }
+ }
+
+ /**
+ * Lock Anchor.
+ * To release the lock(s), the session holding the lock(s) must be closed.

+ *
+ * @param sessionId session ID
+ * @param dataspaceName dataspace name
+ * @param anchorName anchor name
+ * @param timeoutInMilliseconds lock attempt timeout in milliseconds
+ */
+ @SneakyThrows
+ public void lockAnchor(final String sessionId, final String dataspaceName,
+ final String anchorName, final Long timeoutInMilliseconds) {
+ final ExecutorService executorService = Executors.newSingleThreadExecutor();
+ final TimeLimiter timeLimiter = timeLimiterProvider.getTimeLimiter(executorService);
+
+ try {
+ timeLimiter.callWithTimeout(() -> {
+ applyPessimisticWriteLockOnAnchor(sessionId, dataspaceName, anchorName);
+ return null;
+ }, timeoutInMilliseconds, TimeUnit.MILLISECONDS);
+ } catch (final TimeoutException e) {
+ throw new SessionTimeoutException(
+ "Timeout: Anchor locking failed",
+ "The error could be caused by another session holding a lock on the specified table. "
+ + "Retrying the sending the request could be required.", e);
+ } catch (final InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new SessionManagerException("Operation interrupted", "This thread was interrupted.", e);
+ } catch (final ExecutionException | UncheckedExecutionException e) {
+ if (e.getCause() != null) {
+ throw e.getCause();
+ }
+ throw new SessionManagerException(
+ "Operation Aborted",
+ "The transaction request was aborted. "
+ + "Retrying and checking all details are correct could be required", e);
+ } finally {
+ executorService.shutdownNow();
+ }
+ }
+
+ private void applyPessimisticWriteLockOnAnchor(final String sessionId, final String dataspaceName,
+ final String anchorName) {
+ final Session session = getSession(sessionId);
+ final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName);
+ final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName);
+ final int anchorId = anchorEntity.getId();
+ log.debug("Attempting to lock anchor {} for session {}", anchorName, sessionId);
+ session.get(AnchorEntity.class, anchorId, LockMode.PESSIMISTIC_WRITE);
+ log.info("Anchor {} successfully locked", anchorName);
+ }
+
+ private Session getSession(final String sessionId) {
+ final Session session = sessionMap.get(sessionId);
+ if (session == null) {
+ throw new SessionManagerException("Session not found",
+ String.format("Session with ID %s does not exist", sessionId));
+ }
+ return session;
+ }
+}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java b/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java
new file mode 100644
index 0000000000..2bd7ac3763
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java
@@ -0,0 +1,33 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils;
+
+import com.google.common.util.concurrent.SimpleTimeLimiter;
+import com.google.common.util.concurrent.TimeLimiter;
+import java.util.concurrent.ExecutorService;
+import org.springframework.stereotype.Component;
+
+@Component
+public class TimeLimiterProvider {
+ public TimeLimiter getTimeLimiter(final ExecutorService executorService) {
+ return SimpleTimeLimiter.create(executorService);
+ }
+}
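TimeLimiterProvider is a one-method wrapper around Guava's SimpleTimeLimiter; in SessionManagerSpec (further down) it is spied so the limiter can be controlled in tests. A minimal sketch of bounding a blocking call with the provided limiter, where doBlockingWork() is a hypothetical placeholder:

    final ExecutorService executorService = Executors.newSingleThreadExecutor();
    final TimeLimiter timeLimiter = timeLimiterProvider.getTimeLimiter(executorService);
    try {
        timeLimiter.callWithTimeout(() -> {
            doBlockingWork();                      // hypothetical long-running operation
            return null;
        }, 200, TimeUnit.MILLISECONDS);
    } catch (final TimeoutException e) {
        // the call exceeded 200 ms and was cancelled
    } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
    } catch (final ExecutionException e) {
        // the callable itself threw an exception
    } finally {
        executorService.shutdownNow();
    }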
diff --git a/cps-ri/src/main/resources/hibernate.cfg.xml b/cps-ri/src/main/resources/hibernate.cfg.xml
new file mode 100644
index 0000000000..98e6cfc5b7
--- /dev/null
+++ b/cps-ri/src/main/resources/hibernate.cfg.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE hibernate-configuration PUBLIC
+ "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
+ "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
+
+<hibernate-configuration>
+ <session-factory>
+ <property name="hibernate.connection.driver_class">org.postgresql.Driver</property>
+ <property name="hibernate.connection.url">jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb</property>
+ <property name="hibernate.connection.username">${DB_USERNAME}</property>
+ <property name="hibernate.connection.password">${DB_PASSWORD}</property>
+ <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
+ <property name="show_sql">true</property>
+ <property name="hibernate.hbm2ddl.auto">update</property>
+ </session-factory>
+</hibernate-configuration>
\ No newline at end of file
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
index 063bd5b5ae..2de087fc28 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2021 Nordix Foundation
+ * Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2022 Bell Canada
* ================================================================================
@@ -22,6 +22,7 @@
package org.onap.cps.spi.impl
+import org.mockito.Mock
import org.onap.cps.spi.CpsAdminPersistenceService
import org.onap.cps.spi.exceptions.AlreadyDefinedException
import org.onap.cps.spi.exceptions.AnchorNotFoundException
@@ -30,15 +31,21 @@ import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.exceptions.SchemaSetNotFoundException
import org.onap.cps.spi.exceptions.ModuleNamesNotFoundException
import org.onap.cps.spi.model.Anchor
+import org.onap.cps.spi.model.CmHandleQueryParameters
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
+import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper
class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Autowired
CpsAdminPersistenceService objectUnderTest
+ @Mock
+ ObjectMapper objectMapper
+
static final String SET_DATA = '/data/anchor.sql'
+ static final String SET_FRAGMENT_DATA = '/data/fragment.sql'
static final String SAMPLE_DATA_FOR_ANCHORS_WITH_MODULES = '/data/anchors-schemaset-modules.sql'
static final String DATASPACE_WITH_NO_DATA = 'DATASPACE-002-NO-DATA'
static final Integer DELETED_ANCHOR_ID = 3002
@@ -46,7 +53,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql(CLEAR_DATA)
def 'Create and retrieve a new dataspace.'() {
when: 'a new dataspace is created'
- def dataspaceName = 'some new dataspace'
+ def dataspaceName = 'some-new-dataspace'
objectUnderTest.createDataspace(dataspaceName)
then: 'that dataspace can be retrieved from the dataspace repository'
def dataspaceEntity = dataspaceRepository.findByName(dataspaceName).orElseThrow()
@@ -66,7 +73,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql([CLEAR_DATA, SET_DATA])
def 'Create and retrieve a new anchor.'() {
when: 'a new anchor is created'
- def newAnchorName = 'my new anchor'
+ def newAnchorName = 'my-new-anchor'
objectUnderTest.createAnchor(DATASPACE_NAME, SCHEMA_SET_NAME1, newAnchorName)
then: 'that anchor can be retrieved'
def anchor = objectUnderTest.getAnchor(DATASPACE_NAME, newAnchorName)
@@ -141,7 +148,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql(CLEAR_DATA)
def 'Get all anchors in unknown dataspace.'() {
when: 'attempt to get all anchors in an unknown dataspace'
- objectUnderTest.getAnchors('unknown dataspace')
+ objectUnderTest.getAnchors('unknown-dataspace')
then: 'an DataspaceNotFoundException is thrown'
thrown(DataspaceNotFoundException)
}
@@ -219,4 +226,20 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
'dataspace contains schemasets' | 'DATASPACE-003' || DataspaceInUseException | 'contains 1 schemaset(s)'
}
+ @Sql([CLEAR_DATA, SET_FRAGMENT_DATA])
+ def 'Retrieve cm handle ids when #scenario.'() {
+ when: 'the service is invoked'
+ def cmHandleQueryParameters = new CmHandleQueryParameters()
+ cmHandleQueryParameters.setPublicProperties(publicProperties)
+ def returnedCmHandles = objectUnderTest.queryCmHandles(cmHandleQueryParameters)
+ then: 'the correct expected cm handles are returned'
+ returnedCmHandles == expectedCmHandleIds
+ where: 'the following data is used'
+ scenario | publicProperties || expectedCmHandleIds
+ 'single matching property' | ['Contact' : 'newemailforstore@bookstore.com'] || ['PNFDemo2', 'PNFDemo', 'PNFDemo4'] as Set
+ 'public property dont match' | ['wont_match' : 'wont_match'] || [] as Set
+ '2 properties, only one match (and)' | ['Contact' : 'newemailforstore@bookstore.com', 'Contact2': 'newemailforstore2@bookstore.com'] || ['PNFDemo4'] as Set
+ '2 properties, no match (and)' | ['Contact' : 'newemailforstore@bookstore.com', 'Contact2': ''] || [] as Set
+ 'No public properties - return all cm handles' | [ : ] || ['PNFDemo3', 'PNFDemo', 'PNFDemo2', 'PNFDemo4'] as Set
+ }
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
index ae88d302bb..36b378a775 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2021 Nordix Foundation
+ * Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2021 Bell Canada.
* ================================================================================
@@ -92,15 +92,15 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'fully unique descendant name' | '//categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]', '/shops/shop[@id=2]/categories[@code=1]', '/shops/shop[@id=2]/categories[@code=2]']
- 'descendant name match end of other node' | '//book' || ['/shops/shop[@id=1]/categories[@code=1]/book', '/shops/shop[@id=1]/categories[@code=2]/book']
- 'descendant with text condition on leaf' | '//book/title[text()="Chapters"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
+ 'fully unique descendant name' | '//categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']", "/shops/shop[@id='2']/categories[@code='1']", "/shops/shop[@id='2']/categories[@code='2']"]
+ 'descendant name match end of other node' | '//book' || ["/shops/shop[@id='1']/categories[@code='1']/book", "/shops/shop[@id='1']/categories[@code='2']/book"]
+ 'descendant with text condition on leaf' | '//book/title[text()="Chapters"]' || ["/shops/shop[@id='1']/categories[@code='2']/book"]
'descendant with text condition case mismatch' | '//book/title[text()="chapters"]' || []
- 'descendant with text condition on int leaf' | '//book/price[text()="5"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
- 'descendant with text condition on leaf-list' | '//book/labels[text()="special offer"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
+ 'descendant with text condition on int leaf' | '//book/price[text()="5"]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
+ 'descendant with text condition on leaf-list' | '//book/labels[text()="special offer"]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
'descendant with text condition partial match' | '//book/labels[text()="special"]' || []
- 'descendant with text condition (existing) empty string' | '//book/labels[text()=""]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
- 'descendant with text condition on int leaf-list' | '//book/editions[text()="2000"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
+ 'descendant with text condition (existing) empty string' | '//book/labels[text()=""]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
+ 'descendant with text condition on int leaf-list' | '//book/editions[text()="2000"]' || ["/shops/shop[@id='1']/categories[@code='2']/book"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -115,10 +115,10 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'one leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
- 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'leaf and text condition' | '//author[@FirstName="Joe"]/Surname[text()="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'one leaf' | '//author[@FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']", "/shops/shop[@id='1']/categories[@code='2']/book/author[@FirstName='Joe' and @Surname='Smith']"]
+ 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'leaf and text condition' | '//author[@FirstName="Joe"]/Surname[text()="Bloggs"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -133,9 +133,9 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'one partial key leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
- 'one non key leaf' | '//author[@title="Dune"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'mix of partial key and non key leaf' | '//author[@FirstName="Joe" and @title="Dune"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'one partial key leaf' | '//author[@FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']", "/shops/shop[@id='1']/categories[@code='2']/book/author[@FirstName='Joe' and @Surname='Smith']"]
+ 'one non key leaf' | '//author[@title="Dune"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'mix of partial key and non key leaf' | '//author[@FirstName="Joe" and @title="Dune"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -149,13 +149,13 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'multiple list-ancestors' | '//book/ancestor::categories' || ['/shops/shop[@id=1]/categories[@code=1]', '/shops/shop[@id=1]/categories[@code=2]']
- 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ['/shops/shop[@id=1]/categories[@code=1]']
+ 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='1']", "/shops/shop[@id='1']/categories[@code='2']"]
+ 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"]
'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops']
- 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ['/shops/shop[@id=1]']
- 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]']
- 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ['/shops/shop[@id=3]/info/contact']
- 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ['/shops/shop[@id=1]']
+ 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"]
+ 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"]
+ 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"]
+ 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"]
'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || []
'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || []
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
index ab290051a2..6f780fc508 100755
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -23,11 +23,13 @@ package org.onap.cps.spi.impl
import com.fasterxml.jackson.databind.ObjectMapper
import com.google.common.collect.ImmutableSet
+import org.onap.cps.cpspath.parser.PathParsingException
import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.entities.FragmentEntity
import org.onap.cps.spi.exceptions.AlreadyDefinedException
import org.onap.cps.spi.exceptions.AnchorNotFoundException
import org.onap.cps.spi.exceptions.CpsAdminException
+import org.onap.cps.spi.exceptions.CpsPathException
import org.onap.cps.spi.exceptions.DataNodeNotFoundException
import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.model.DataNode
@@ -150,7 +152,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
thrown(expectedException)
where: 'the following data is used'
scenario | parentXpath | dataNode || expectedException
- 'parent does not exist' | 'unknown' | newDataNode || DataNodeNotFoundException
+ 'parent does not exist' | '/unknown' | newDataNode || DataNodeNotFoundException
'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException
}
@@ -185,9 +187,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'following parameters were used'
- scenario | parentNodeXpath | listElementXpaths || expectedException
- 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
- 'already existing fragment' | '/parent-201' | ['/parent-201/child-204[@key="A"]'] || AlreadyDefinedException
+ scenario | parentNodeXpath | listElementXpaths || expectedException
+ 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
+ 'data fragment already exists' | '/parent-201' | ["/parent-201/child-204[@key='A']"] || AlreadyDefinedException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -208,6 +210,15 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
+ def 'Cps Path query with syntax error throws a CPS Path Exception.'() {
+ when: 'trying to execute a query with a syntax (parsing) error'
+ objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, 'invalid-cps-path/child' , OMIT_DESCENDANTS)
+ then: 'exception is thrown'
+ def exceptionThrown = thrown(CpsPathException)
+ assert exceptionThrown.getDetails().contains('failed to parse at line 1 due to extraneous input \'invalid-cps-path\' expecting \'/\'')
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
def 'Get data node by xpath with all descendants.'() {
when: 'data node is requested with all descendants'
def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
@@ -235,10 +246,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NO XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -265,10 +276,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -359,10 +370,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -468,10 +479,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
assert remainingChildXpaths.containsAll(expectedRemainingChildXpaths)
where: 'following parameters were used'
scenario | targetXpaths | parentFragmentId || expectedRemainingChildXpaths
- 'list element with key' | '/parent-203/child-204[@key="A"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203', '/parent-203/child-204[@key="B"]']
- 'list element with combined keys' | '/parent-202/child-205[@key="A" and @key2="B"]' | LIST_DATA_NODE_PARENT202_FRAGMENT_ID || ['/parent-202/child-206[@key="A"]']
+ 'list element with key' | '/parent-203/child-204[@key="A"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='B']"]
+ 'list element with combined keys' | '/parent-202/child-205[@key="A" and @key2="B"]' | LIST_DATA_NODE_PARENT202_FRAGMENT_ID || ["/parent-202/child-206[@key='A']"]
'whole list' | '/parent-203/child-204' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203']
- 'list element under list element' | '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203', '/parent-203/child-204[@key="A"]', '/parent-203/child-204[@key="B"]']
+ 'list element under list element' | '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -510,9 +521,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
'child of target' | '/parent-206/child-206' | '/parent-206/child-206' || null
'child data node, parent still exists' | '/parent-206/child-206' | '/parent-206' || '/parent-206'
'list element' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="A"]' || null
- 'list element, sibling still exists' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="X"]' || '/parent-206/child-206/grand-child-206[@key="X"]'
+ 'list element, sibling still exists' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="X"]' || "/parent-206/child-206/grand-child-206[@key='X']"
'container node' | '/parent-206' | '/parent-206' || null
- 'container list node' | '/parent-206[@key="A"]' | '/parent-206[@key="B"]' || '/parent-206[@key="B"]'
+ 'container list node' | '/parent-206[@key="A"]' | '/parent-206[@key="B"]' || "/parent-206[@key='B']"
'root node with xpath /' | '/' | '/' || null
'root node with xpath passed as blank' | '' | '' || null
@@ -523,11 +534,11 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
when: 'data node is deleted'
objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath)
then: 'a #expectedException is thrown'
- thrown(DataNodeNotFoundException)
+ thrown(expectedException)
where: 'the following parameters were used'
- scenario | datanodeXpath
- 'valid data node, non existent child node' | '/parent-203/child-non-existent'
- 'invalid list element' | '/parent-206/child-206/grand-child-206@key="A"]'
+ scenario | datanodeXpath | expectedException
+ 'valid data node, non existent child node' | '/parent-203/child-non-existent' | DataNodeNotFoundException
+ 'invalid list element' | '/parent-206/child-206/grand-child-206@key="A"]' | PathParsingException
}
@Sql([CLEAR_DATA, SET_DATA])
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index 7166008ad3..b37f471e76 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -1,6 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (c) 2021 Bell Canada.
+ * Modifications Copyright (C) 2021-2022 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,88 +29,111 @@ import org.onap.cps.spi.model.DataNodeBuilder
import org.onap.cps.spi.repository.AnchorRepository
import org.onap.cps.spi.repository.DataspaceRepository
import org.onap.cps.spi.repository.FragmentRepository
+import org.onap.cps.spi.utils.SessionManager
import org.onap.cps.utils.JsonObjectMapper
import spock.lang.Specification
-
class CpsDataPersistenceServiceSpec extends Specification {
def mockDataspaceRepository = Mock(DataspaceRepository)
def mockAnchorRepository = Mock(AnchorRepository)
def mockFragmentRepository = Mock(FragmentRepository)
def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
+ def mockSessionManager = Mock(SessionManager)
def objectUnderTest = new CpsDataPersistenceServiceImpl(
- mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper)
+ mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper,mockSessionManager)
def 'Handling of StaleStateException (caused by concurrent updates) during data node tree update.'() {
- def parentXpath = 'parent-01'
+ def parentXpath = '/parent-01'
def myDataspaceName = 'my-dataspace'
def myAnchorName = 'my-anchor'
given: 'data node object'
- def submittedDataNode = new DataNodeBuilder()
- .withXpath(parentXpath)
- .withLeaves(['leaf-name': 'leaf-value'])
- .build()
+ def submittedDataNode = new DataNodeBuilder()
+ .withXpath(parentXpath)
+ .withLeaves(['leaf-name': 'leaf-value'])
+ .build()
and: 'fragment to be updated'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- def fragmentEntity = new FragmentEntity()
- fragmentEntity.setXpath(parentXpath)
- fragmentEntity.setChildFragments(Collections.emptySet())
- return fragmentEntity
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ def fragmentEntity = new FragmentEntity()
+ fragmentEntity.setXpath(parentXpath)
+ fragmentEntity.setChildFragments(Collections.emptySet())
+ return fragmentEntity
+ }
and: 'data node is concurrently updated by another transaction'
- mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
+ mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
when: 'attempt to update data node'
- objectUnderTest.replaceDataNodeTree(myDataspaceName, myAnchorName, submittedDataNode)
+ objectUnderTest.replaceDataNodeTree(myDataspaceName, myAnchorName, submittedDataNode)
then: 'concurrency exception is thrown'
- def concurrencyException = thrown(ConcurrencyException)
- assert concurrencyException.getDetails().contains(myDataspaceName)
- assert concurrencyException.getDetails().contains(myAnchorName)
- assert concurrencyException.getDetails().contains(parentXpath)
+ def concurrencyException = thrown(ConcurrencyException)
+ assert concurrencyException.getDetails().contains(myDataspaceName)
+ assert concurrencyException.getDetails().contains(myAnchorName)
+ assert concurrencyException.getDetails().contains(parentXpath)
}
def 'Retrieving a data node with a property JSON value of #scenario'() {
given: 'a fragment with a property JSON value of #scenario'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- new FragmentEntity(childFragments: Collections.emptySet(),
- attributes: "{\"some attribute\": ${dataString}}")
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ new FragmentEntity(childFragments: Collections.emptySet(),
+ attributes: "{\"some attribute\": ${dataString}}")
+ }
when: 'getting the data node represented by this fragment'
- def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
- 'parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
+ '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
then: 'the leaf is of the correct value and data type'
- def attributeValue = dataNode.leaves.get('some attribute')
- assert attributeValue == expectedValue
- assert attributeValue.class == expectedDataClass
+ def attributeValue = dataNode.leaves.get('some attribute')
+ assert attributeValue == expectedValue
+ assert attributeValue.class == expectedDataClass
where: 'the following Data Type is passed'
- scenario | dataString || expectedValue | expectedDataClass
- 'just numbers' | '15174' || 15174 | Integer
- 'number with dot' | '15174.32' || 15174.32 | Double
- 'number with 0 value after dot' | '15174.0' || 15174.0 | Double
- 'number with 0 value before dot' | '0.32' || 0.32 | Double
- 'number higher than max int' | '2147483648' || 2147483648 | Long
- 'just text' | '"Test"' || 'Test' | String
- 'number with exponent' | '1.2345e5' || 1.2345e5 | Double
- 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double
- 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String
- 'number as String' | '"12345"' || '12345' | String
+ scenario | dataString || expectedValue | expectedDataClass
+ 'just numbers' | '15174' || 15174 | Integer
+ 'number with dot' | '15174.32' || 15174.32 | Double
+ 'number with 0 value after dot' | '15174.0' || 15174.0 | Double
+ 'number with 0 value before dot' | '0.32' || 0.32 | Double
+ 'number higher than max int' | '2147483648' || 2147483648 | Long
+ 'just text' | '"Test"' || 'Test' | String
+ 'number with exponent' | '1.2345e5' || 1.2345e5 | Double
+ 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double
+ 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String
+ 'number as String' | '"12345"' || '12345' | String
}
def 'Retrieving a data node with invalid JSON'() {
given: 'a fragment with invalid JSON'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- new FragmentEntity(childFragments: Collections.emptySet(), attributes: '{invalid json')
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ new FragmentEntity(childFragments: Collections.emptySet(), attributes: '{invalid json')
+ }
when: 'getting the data node represented by this fragment'
- def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
- 'parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
+ '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
then: 'a data validation exception is thrown'
- thrown(DataValidationException)
+ thrown(DataValidationException)
+ }
+
+ def 'start session'() {
+ when: 'start session'
+ objectUnderTest.startSession()
+ then: 'the session manager method to start session is invoked'
+ 1 * mockSessionManager.startSession()
}
-}
+ def 'close session'() {
+ given: 'session ID'
+ def someSessionId = 'someSessionId'
+ when: 'close session method is called with session ID as parameter'
+ objectUnderTest.closeSession(someSessionId)
+ then: 'the session manager method to close session is invoked with parameter'
+ 1 * mockSessionManager.closeSession(someSessionId)
+ }
+
+ def 'Lock anchor.'(){
+ when: 'lock anchor method is called with anchor entity details'
+ objectUnderTest.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
+ then: 'the session manager method to lock anchor is invoked with same parameters'
+ 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
+ }
+}
\ No newline at end of file
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy
new file mode 100644
index 0000000000..9b58c8bc32
--- /dev/null
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy
@@ -0,0 +1,69 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils
+
+import org.onap.cps.spi.exceptions.SessionManagerException
+import org.onap.cps.spi.impl.CpsPersistenceSpecBase
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.jdbc.Sql
+
+class SessionManagerIntegrationSpec extends CpsPersistenceSpecBase{
+
+ final static String SET_DATA = '/data/anchor.sql'
+
+ @Autowired
+ SessionManager objectUnderTest
+
+ def sessionId
+ def shortTimeoutForTesting = 200L
+
+ def setup(){
+ sessionId = objectUnderTest.startSession()
+ }
+
+ def cleanup(){
+ objectUnderTest.closeSession(sessionId)
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Lock anchor.'() {
+ when: 'session tries to acquire anchor lock by passing anchor entity details'
+ objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ then: 'no exception is thrown'
+ noExceptionThrown()
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Attempt to lock anchor when another session is holding the lock.'() {
+ given: 'another session that holds an anchor lock'
+ def otherSessionId = objectUnderTest.startSession()
+ objectUnderTest.lockAnchor(otherSessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ when: 'a session tries to acquire the same anchor lock'
+ objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ then: 'a session manager exception is thrown specifying the operation timed out'
+ def thrown = thrown(SessionManagerException)
+ thrown.message.contains('Timeout')
+ then: 'the lock can be acquired once the other session holding it is closed'
+ objectUnderTest.closeSession(otherSessionId)
+ objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ }
+
+}
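For orientation, here is a minimal sketch (not part of this change) of how a caller might combine the session operations exercised above; the dataspace and anchor names, the timeout, and the update step are illustrative placeholders, and sessionManager stands for an injected SessionManager instance:

    // hypothetical caller of SessionManager; names and timeout are placeholders
    def sessionId = sessionManager.startSession()
    try {
        // blocks until the anchor lock is granted or the timeout (in milliseconds) expires,
        // in which case a SessionManagerException is expected
        sessionManager.lockAnchor(sessionId, 'my-dataspace', 'my-anchor', 500L)
        // ... perform the work that must not run concurrently on this anchor ...
    } finally {
        // closing the session releases any lock it still holds
        sessionManager.closeSession(sessionId)
    }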
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy
new file mode 100644
index 0000000000..a2df06ef0e
--- /dev/null
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy
@@ -0,0 +1,99 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils
+
+import com.google.common.util.concurrent.TimeLimiter
+import org.hibernate.HibernateException
+import org.hibernate.Transaction
+import org.onap.cps.spi.entities.AnchorEntity
+import org.onap.cps.spi.exceptions.SessionManagerException
+import org.onap.cps.spi.repository.AnchorRepository
+import org.onap.cps.spi.repository.DataspaceRepository
+import org.testcontainers.shaded.com.google.common.util.concurrent.UncheckedExecutionException
+import spock.lang.Specification
+import org.hibernate.Session
+
+import java.util.concurrent.ExecutionException
+
+class SessionManagerSpec extends Specification {
+
+ def spiedTimeLimiterProvider = Spy(TimeLimiterProvider)
+ def mockDataspaceRepository = Mock(DataspaceRepository)
+ def mockAnchorRepository = Mock(AnchorRepository)
+ def mockSession = Mock(Session)
+
+ def objectUnderTest = new SessionManager(spiedTimeLimiterProvider, mockDataspaceRepository, mockAnchorRepository)
+
+ def 'Lock anchor entity with #exceptionDuringTest exception.'() {
+ given: 'a dummy session'
+ objectUnderTest.sessionMap.put('dummySession', mockSession)
+ and: 'the anchor name can be resolved'
+ def mockAnchorEntity = Mock(AnchorEntity)
+ mockAnchorEntity.getId() >> 456
+ mockAnchorRepository.getByDataspaceAndName(_, _) >> mockAnchorEntity
+ and: 'timeLimiter throws an #exceptionDuringTest exception'
+ def mockTimeLimiter = Mock(TimeLimiter)
+ spiedTimeLimiterProvider.getTimeLimiter(_) >> mockTimeLimiter
+ mockTimeLimiter.callWithTimeout(*_) >> { throw exceptionDuringTest }
+ when: 'session tries to acquire anchor lock'
+ objectUnderTest.lockAnchor('dummySession', 'some-dataspace', 'some-anchor', 123L)
+ then: 'a session manager exception is thrown with the expected detail'
+ def thrown = thrown(SessionManagerException)
+ thrown.details.contains(expectedExceptionDetail)
+ where:
+ exceptionDuringTest || expectedExceptionDetail
+ new InterruptedException() || 'interrupted'
+ new ExecutionException() || 'aborted'
+ }
+
+ def 'Close session that does not exist.'() {
+ when: 'attempt to close session that does not exist'
+ objectUnderTest.closeSession('unknown session id')
+ then: 'a session manager exception is thrown with the unknown id in the details'
+ def thrown = thrown(SessionManagerException)
+ assert thrown.details.contains('unknown session id')
+ }
+
+ def 'Hibernate exception while closing session.'() {
+ given: 'a test session with a transaction'
+ objectUnderTest.sessionMap.put('testSessionId', mockSession)
+ mockSession.getTransaction() >> Mock(Transaction)
+ and: 'a hibernate exception is thrown when closing that session'
+ def hibernateException = new HibernateException('test')
+ mockSession.close() >> { throw hibernateException }
+ when: 'attempt to close session'
+ objectUnderTest.closeSession('testSessionId')
+ then: 'a session manager exception is thrown with the session id in the details'
+ def thrown = thrown(SessionManagerException)
+ assert thrown.details.contains('testSessionId')
+ and: 'the original exception as cause'
+ assert thrown.cause == hibernateException
+ }
+
+ def 'Attempt to lock anchor entity with session Id that does not exist.'() {
+ when: 'attempt to acquire anchor lock with a session that does not exist'
+ objectUnderTest.lockAnchor('unknown session id', '', '', 123L)
+ then: 'a session manager exception is thrown with the unknown id in the details'
+ def thrown = thrown(SessionManagerException)
+ thrown.details.contains('unknown session id')
+ }
+
+}
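The spec above stubs Guava's TimeLimiter; the pattern it exercises looks roughly like the sketch below. This is an illustration only: the acquireLock closure is a stand-in for the blocking lock acquisition, and the RuntimeException wrapping stands in for the SessionManagerException mapping verified above.

    import com.google.common.util.concurrent.SimpleTimeLimiter
    import java.util.concurrent.Callable
    import java.util.concurrent.ExecutionException
    import java.util.concurrent.Executors
    import java.util.concurrent.TimeUnit
    import java.util.concurrent.TimeoutException

    def timeLimiter = SimpleTimeLimiter.create(Executors.newSingleThreadExecutor())
    def acquireLock = { /* blocking lock acquisition goes here */ } as Callable
    try {
        // give up if the lock cannot be acquired within the requested time
        timeLimiter.callWithTimeout(acquireLock, 123L, TimeUnit.MILLISECONDS)
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt()
        // the production code is expected to surface this as a SessionManagerException mentioning 'interrupted'
        throw new RuntimeException('interrupted while acquiring anchor lock', e)
    } catch (ExecutionException | TimeoutException e) {
        // ...and this path as a SessionManagerException indicating the operation was aborted or timed out
        throw new RuntimeException('could not acquire anchor lock', e)
    }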
diff --git a/cps-ri/src/test/resources/data/cps-path-query.sql b/cps-ri/src/test/resources/data/cps-path-query.sql
index 8f525df6bd..d1a62209eb 100644
--- a/cps-ri/src/test/resources/data/cps-path-query.sql
+++ b/cps-ri/src/test/resources/data/cps-path-query.sql
@@ -1,6 +1,6 @@
/*
============LICENSE_START=======================================================
- Copyright (C) 2021 Nordix Foundation.
+ Copyright (C) 2021-2022 Nordix Foundation.
Modifications Copyright (C) 2021 Bell Canada.
================================================================================
Licensed under the Apache License, Version 2.0 (the "License");
@@ -30,25 +30,25 @@ INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
(1, 1001, 1003, null, '/shops', null),
- (2, 1001, 1003, 1, '/shops/shop[@id=1]', '{"id" : 1, "type" : "bookstore"}'),
- (3, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=1]', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
- (4, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=2]', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
- (5, 1001, 1003, 3, '/shops/shop[@id=1]/categories[@code=1]/book', '{"price" : 5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
- (6, 1001, 1003, 4, '/shops/shop[@id=1]/categories[@code=2]/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
- (7, 1001, 1003, 5, '/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
- (8, 1001, 1003, 6, '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}');
+ (2, 1001, 1003, 1, '/shops/shop[@id=''1'']', '{"id" : 1, "type" : "bookstore"}'),
+ (3, 1001, 1003, 2, '/shops/shop[@id=''1'']/categories[@code=''1'']', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
+ (4, 1001, 1003, 2, '/shops/shop[@id=''1'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
+ (5, 1001, 1003, 3, '/shops/shop[@id=''1'']/categories[@code=''1'']/book', '{"price" : 5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
+ (6, 1001, 1003, 4, '/shops/shop[@id=''1'']/categories[@code=''2'']/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
+ (7, 1001, 1003, 5, '/shops/shop[@id=''1'']/categories[@code=''1'']/book/author[@FirstName=''Joe'' and @Surname=''Bloggs'']', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
+ (8, 1001, 1003, 6, '/shops/shop[@id=''1'']/categories[@code=''2'']/book/author[@FirstName=''Joe'' and @Surname=''Smith'']', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}');
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (9, 1001, 1003, 1, '/shops/shop[@id=2]', '{"type" : "bookstore"}'),
- (10, 1001, 1003, 9, '/shops/shop[@id=2]/categories[@code=1]', '{"code" : 2, "type" : "bookstore", "name": "Kids"}'),
- (11, 1001, 1003, 10, '/shops/shop[@id=2]/categories[@code=2]', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}');
+ (9, 1001, 1003, 1, '/shops/shop[@id=''2'']', '{"type" : "bookstore"}'),
+ (10, 1001, 1003, 9, '/shops/shop[@id=''2'']/categories[@code=''1'']', '{"code" : 2, "type" : "bookstore", "name": "Kids"}'),
+ (11, 1001, 1003, 10, '/shops/shop[@id=''2'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}');
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (12, 1001, 1003, 1, '/shops/shop[@id=3]', '{"type" : "garden centre"}'),
- (13, 1001, 1003, 12, '/shops/shop[@id=3]/categories[@code=1]', '{"id" : 1, "type" : "garden centre", "name": "indoor plants"}'),
- (14, 1001, 1003, 12, '/shops/shop[@id=3]/categories[@code=2]', '{"id" : 2, "type" : "garden centre", "name": "outdoor plants"}'),
- (16, 1001, 1003, 1, '/shops/shop[@id=3]/info', null),
- (17, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact', null),
- (18, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact/website', '{"address" : "myshop.ie"}'),
- (19, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="mob"]', '{"type" : "mob", "number" : "123123456"}'),
- (20, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="landline"]', '{"type" : "landline", "number" : "012123456"}');
+ (12, 1001, 1003, 1, '/shops/shop[@id=''3'']', '{"type" : "garden centre"}'),
+ (13, 1001, 1003, 12, '/shops/shop[@id=''3'']/categories[@code=''1'']', '{"id" : 1, "type" : "garden centre", "name": "indoor plants"}'),
+ (14, 1001, 1003, 12, '/shops/shop[@id=''3'']/categories[@code=''2'']', '{"id" : 2, "type" : "garden centre", "name": "outdoor plants"}'),
+ (16, 1001, 1003, 1, '/shops/shop[@id=''3'']/info', null),
+ (17, 1001, 1003, 1, '/shops/shop[@id=''3'']/info/contact', null),
+ (18, 1001, 1003, 1, '/shops/shop[@id=''3'']/info/contact/website', '{"address" : "myshop.ie"}'),
+ (19, 1001, 1003, 12, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''mob'']', '{"type" : "mob", "number" : "123123456"}'),
+ (20, 1001, 1003, 12, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''landline'']', '{"type" : "landline", "number" : "012123456"}');
diff --git a/cps-ri/src/test/resources/data/fragment.sql b/cps-ri/src/test/resources/data/fragment.sql
index a27bb5fdea..4106541061 100755
--- a/cps-ri/src/test/resources/data/fragment.sql
+++ b/cps-ri/src/test/resources/data/fragment.sql
@@ -1,6 +1,6 @@
/*
============LICENSE_START=======================================================
- Copyright (C) 2021 Nordix Foundation.
+ Copyright (C) 2021-2022 Nordix Foundation.
Modifications Copyright (C) 2021 Pantheon.tech
Modifications Copyright (C) 2021-2022 Bell Canada.
================================================================================
@@ -21,14 +21,16 @@
*/
INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'DATASPACE-001');
+ (1001, 'DATASPACE-001'),
+ (1002, 'NCMP-Admin');
INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
(2001, 'SCHEMA-SET-001', 1001);
INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
(3001, 'ANCHOR-001', 1001, 2001),
- (3003, 'ANCHOR-003', 1001, 2001);
+ (3003, 'ANCHOR-003', 1001, 2001),
+ (3004, 'ncmp-dmi-registry', 1002, 2001);
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES
(4001, 1001, 3001, null, '/parent-1'),
@@ -50,21 +52,32 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
(4203, 1001, 3003, 4202, '/parent-200/child-201/grand-child', '{"leaf-value": "original"}'),
(4206, 1001, 3003, null, '/parent-201', '{"leaf-value": "original"}'),
(4207, 1001, 3003, 4206, '/parent-201/child-203', '{}'),
- (4208, 1001, 3003, 4206, '/parent-201/child-204[@key="A"]', '{"key": "A"}'),
- (4209, 1001, 3003, 4206, '/parent-201/child-204[@key="B"]', '{"key": "B"}'),
+ (4208, 1001, 3003, 4206, '/parent-201/child-204[@key=''A'']', '{"key": "A"}'),
+ (4209, 1001, 3003, 4206, '/parent-201/child-204[@key=''B'']', '{"key": "B"}'),
(4211, 1001, 3003, null, '/parent-202', '{"leaf-value": "original"}'),
- (4212, 1001, 3003, 4211, '/parent-202/child-205[@key="A" and @key2="B"]', '{"key": "A", "key2": "B"}'),
- (4213, 1001, 3003, 4211, '/parent-202/child-206[@key="A"]', '{"key": "A"}'),
+ (4212, 1001, 3003, 4211, '/parent-202/child-205[@key=''A'' and @key2=''B'']', '{"key": "A", "key2": "B"}'),
+ (4213, 1001, 3003, 4211, '/parent-202/child-206[@key=''A'']', '{"key": "A"}'),
(4214, 1001, 3003, null, '/parent-203', '{"leaf-value": "original"}'),
(4215, 1001, 3003, 4214, '/parent-203/child-203', '{}'),
- (4216, 1001, 3003, 4214, '/parent-203/child-204[@key="A"]', '{"key": "A"}'),
- (4217, 1001, 3003, 4214, '/parent-203/child-204[@key="B"]', '{"key": "B"}'),
- (4218, 1001, 3003, 4217, '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]', '{"key": "B", "key2": "Y"}'),
+ (4216, 1001, 3003, 4214, '/parent-203/child-204[@key=''A'']', '{"key": "A"}'),
+ (4217, 1001, 3003, 4214, '/parent-203/child-204[@key=''B'']', '{"key": "B"}'),
+ (4218, 1001, 3003, 4217, '/parent-203/child-204[@key=''B'']/grand-child-204[@key2=''Y'']', '{"key": "B", "key2": "Y"}'),
(4226, 1001, 3003, null, '/parent-206', '{"leaf-value": "original"}'),
(4227, 1001, 3003, 4226, '/parent-206/child-206', '{}'),
(4228, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206', '{}'),
- (4229, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key="A"]', '{"key": "A"}'),
- (4230, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key="X"]', '{"key": "X"}'),
- (4231, 1001, 3003, null, '/parent-206[@key="A"]', '{"key": "A"}'),
- (4232, 1001, 3003, 4231, '/parent-206[@key="A"]/child-206', '{}'),
- (4233, 1001, 3003, null, '/parent-206[@key="B"]', '{"key": "B"}'); \ No newline at end of file
+ (4229, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''A'']', '{"key": "A"}'),
+ (4230, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''X'']', '{"key": "X"}'),
+ (4231, 1001, 3003, null, '/parent-206[@key=''A'']', '{"key": "A"}'),
+ (4232, 1001, 3003, 4231, '/parent-206[@key=''A'']/child-206', '{}'),
+ (4233, 1001, 3003, null, '/parent-206[@key=''B'']', '{"key": "B"}');
+
+INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
+ (5000, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo'']', '{"id": "PNFDemo", "dmi-service-name": "http://172.21.235.14:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5001, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo2'']', '{"id": "PNFDemo2", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5002, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo3'']', '{"id": "PNFDemo3", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5003, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo4'']', '{"id": "PNFDemo4", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5004, 1002, 3004, 5000, '/dmi-registry/cm-handles[@id=''PNFDemo'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5005, 1002, 3004, 5001, '/dmi-registry/cm-handles[@id=''PNFDemo2'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5006, 1002, 3004, 5002, '/dmi-registry/cm-handles[@id=''PNFDemo3'']/public-properties[@name=''Contact'']', '{"name": "Contact3", "value": "PNF3@bookstore.com"}'),
+ (5007, 1002, 3004, 5003, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5008, 1002, 3004, 5004, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact2'']', '{"name": "Contact2", "value": "newemailforstore2@bookstore.com"}');
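As an aside, the doubled single quotes in the INSERT statements above are plain SQL escaping: the stored xpaths contain single-quoted predicates such as [@id='PNFDemo']. A rough sketch of querying these rows directly (connection settings are placeholders) could look like:

    import groovy.sql.Sql

    // placeholder connection settings for the test database
    def sql = Sql.newInstance('jdbc:postgresql://localhost:5432/cpsdb', 'cps', 'cps', 'org.postgresql.Driver')
    // the bind value carries the single quotes literally; doubling is only needed inside SQL string literals
    def rows = sql.rows(
        'SELECT xpath, attributes FROM fragment WHERE anchor_id = 3004 AND xpath LIKE ?',
        ["%/public-properties[@name='Contact']%"])
    rows.each { println "${it.xpath} -> ${it.attributes}" }
    sql.close()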
diff --git a/cps-ri/src/test/resources/hibernate.cfg.xml b/cps-ri/src/test/resources/hibernate.cfg.xml
new file mode 100644
index 0000000000..fae9275ddc
--- /dev/null
+++ b/cps-ri/src/test/resources/hibernate.cfg.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE hibernate-configuration PUBLIC
+ "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
+ "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
+
+<hibernate-configuration>
+ <session-factory>
+ <property name="hibernate.connection.driver_class">org.postgresql.Driver</property>
+ <property name="hibernate.connection.url">${DB_URL}</property>
+ <property name="hibernate.connection.username">${DB_USERNAME}</property>
+ <property name="hibernate.connection.password">${DB_PASSWORD}</property>
+ <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
+ <property name="show_sql">true</property>
+ <property name="hibernate.hbm2ddl.auto">none</property>
+ </session-factory>
+</hibernate-configuration>
\ No newline at end of file
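The ${DB_URL}, ${DB_USERNAME} and ${DB_PASSWORD} placeholders are assumed to be resolved at runtime rather than by Hibernate itself. One possible sketch, under the assumption that the values come from environment variables of the same names, is:

    import org.hibernate.cfg.Configuration

    // load the XML above, then override the placeholder properties from the environment
    def configuration = new Configuration().configure('hibernate.cfg.xml')
    configuration.setProperty('hibernate.connection.url', System.getenv('DB_URL'))
    configuration.setProperty('hibernate.connection.username', System.getenv('DB_USERNAME'))
    configuration.setProperty('hibernate.connection.password', System.getenv('DB_PASSWORD'))
    def sessionFactory = configuration.buildSessionFactory()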