Diffstat (limited to 'cps-ri/src')
22 files changed, 1251 insertions, 374 deletions
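For orientation before the per-file diffs: the central theme of this change set is batching, e.g. storeDataNode/getDataNode in CpsDataPersistenceServiceImpl gain storeDataNodes/getDataNodes counterparts that accept collections. A minimal caller-side sketch of those two new methods follows; the service reference, dataspace/anchor names and xpaths are illustrative only and not taken from this change set:

    // Illustrative usage only; signatures match the methods added below in CpsDataPersistenceServiceImpl.
    cpsDataPersistenceService.storeDataNodes("example-dataspace", "example-anchor", dataNodesToStore);
    final Collection<DataNode> readBack = cpsDataPersistenceService.getDataNodes("example-dataspace",
        "example-anchor", Arrays.asList("/parent-1", "/parent-2"), FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS);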
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
index 27891c525e..6b1162d11b 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
@@ -31,14 +31,13 @@ import lombok.NoArgsConstructor;
public class FragmentEntityArranger {
/**
- * Convert a collection of (related) FragmentExtracts into a FragmentEntity (tree) with descendants.
- * Multiple top level nodes not yet support. If found only the first top level element is returned
+ * Convert a collection of (related) FragmentExtracts into FragmentEntities (trees) with descendants.
*
* @param anchorEntity the anchor(entity) all the fragments belong to
* @param fragmentExtracts FragmentExtracts to convert
- * @return a FragmentEntity (tree) with descendants, null if none found.
+ * @return a collection of FragmentEntities (trees) with descendants.
*/
- public static FragmentEntity toFragmentEntityTree(final AnchorEntity anchorEntity,
+ public static Collection<FragmentEntity> toFragmentEntityTrees(final AnchorEntity anchorEntity,
final Collection<FragmentExtract> fragmentExtracts) {
final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>();
for (final FragmentExtract fragmentExtract : fragmentExtracts) {
@@ -61,7 +60,8 @@ public class FragmentEntityArranger {
return fragmentEntity;
}
- private static FragmentEntity reuniteChildrenWithTheirParents(final Map<Long, FragmentEntity> fragmentEntityPerId) {
+ private static Collection<FragmentEntity> reuniteChildrenWithTheirParents(
+ final Map<Long, FragmentEntity> fragmentEntityPerId) {
final Collection<FragmentEntity> fragmentEntitiesWithoutParentInResultSet = new HashSet<>();
for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) {
final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId());
@@ -71,7 +71,7 @@ public class FragmentEntityArranger {
parentFragmentEntity.getChildFragments().add(fragmentEntity);
}
}
- return fragmentEntitiesWithoutParentInResultSet.stream().findFirst().orElse(null);
+ return fragmentEntitiesWithoutParentInResultSet;
}
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index ac1be1cd30..06ee8ecada 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -1,8 +1,9 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2021-2022 Nordix Foundation
+ * Copyright (C) 2021-2023 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2020-2022 Bell Canada.
+ * Modifications Copyright (C) 2022 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,6 +25,7 @@ package org.onap.cps.spi.impl; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet.Builder; +import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -59,6 +61,7 @@ import org.onap.cps.spi.model.DataNode; import org.onap.cps.spi.model.DataNodeBuilder; import org.onap.cps.spi.repository.AnchorRepository; import org.onap.cps.spi.repository.DataspaceRepository; +import org.onap.cps.spi.repository.FragmentQueryBuilder; import org.onap.cps.spi.repository.FragmentRepository; import org.onap.cps.spi.utils.SessionManager; import org.onap.cps.utils.JsonObjectMapper; @@ -77,9 +80,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService private final SessionManager sessionManager; private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@[\\s\\S]+?]){0,1})"; - private static final Pattern REG_EX_PATTERN_FOR_LIST_ELEMENT_KEY_PREDICATE = - Pattern.compile("\\[(\\@([^\\/]{0,9999}))\\]$"); - private static final String TOP_LEVEL_MODULE_PREFIX_PROPERTY_NAME = "topLevelModulePrefix"; @Override public void addChildDataNode(final String dataspaceName, final String anchorName, final String parentNodeXpath, @@ -88,6 +88,12 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService } @Override + public void addChildDataNodes(final String dataspaceName, final String anchorName, + final String parentNodeXpath, final Collection<DataNode> dataNodes) { + addChildrenDataNodes(dataspaceName, anchorName, parentNodeXpath, dataNodes); + } + + @Override public void addListElements(final String dataspaceName, final String anchorName, final String parentNodeXpath, final Collection<DataNode> newListElements) { addChildrenDataNodes(dataspaceName, anchorName, parentNodeXpath, newListElements); @@ -166,14 +172,45 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService @Override public void storeDataNode(final String dataspaceName, final String anchorName, final DataNode dataNode) { + storeDataNodes(dataspaceName, anchorName, Collections.singletonList(dataNode)); + } + + @Override + public void storeDataNodes(final String dataspaceName, final String anchorName, + final Collection<DataNode> dataNodes) { final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity, - dataNode); + final List<FragmentEntity> fragmentEntities = new ArrayList<>(dataNodes.size()); try { - fragmentRepository.save(fragmentEntity); + for (final DataNode dataNode: dataNodes) { + final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity, + dataNode); + fragmentEntities.add(fragmentEntity); + } + fragmentRepository.saveAll(fragmentEntities); } catch (final DataIntegrityViolationException exception) { - throw AlreadyDefinedException.forDataNode(dataNode.getXpath(), anchorName, exception); + log.warn("Exception occurred : {} , While saving : {} data nodes, Retrying saving data nodes individually", + exception, dataNodes.size()); + storeDataNodesIndividually(dataspaceName, anchorName, dataNodes); + } + } + + private void storeDataNodesIndividually(final String dataspaceName, final String anchorName, + final Collection<DataNode> dataNodes) { + final DataspaceEntity dataspaceEntity = 
dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + final Collection<String> failedXpaths = new HashSet<>(); + for (final DataNode dataNode: dataNodes) { + try { + final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity, + dataNode); + fragmentRepository.save(fragmentEntity); + } catch (final DataIntegrityViolationException e) { + failedXpaths.add(dataNode.getXpath()); + } + } + if (!failedXpaths.isEmpty()) { + throw new AlreadyDefinedExceptionBatch(failedXpaths); } } @@ -219,6 +256,27 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService return toDataNode(fragmentEntity, fetchDescendantsOption); } + @Override + public Collection<DataNode> getDataNodes(final String dataspaceName, final String anchorName, + final Collection<String> xpaths, + final FetchDescendantsOption fetchDescendantsOption) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + + final Set<String> normalizedXpaths = new HashSet<>(xpaths.size()); + for (final String xpath : xpaths) { + try { + normalizedXpaths.add(CpsPathUtil.getNormalizedXpath(xpath)); + } catch (final PathParsingException e) { + log.warn("Error parsing xpath \"{}\" in getDataNodes: {}", xpath, e.getMessage()); + } + } + + final List<FragmentEntity> fragmentEntities = + fragmentRepository.findByAnchorAndMultipleCpsPaths(anchorEntity.getId(), normalizedXpaths); + return toDataNodes(fragmentEntities, fetchDescendantsOption); + } + private FragmentEntity getFragmentWithoutDescendantsByXpath(final String dataspaceName, final String anchorName, final String xpath) { @@ -229,37 +287,37 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService final String xpath, final FetchDescendantsOption fetchDescendantsOption) { final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + final FragmentEntity fragmentEntity; if (isRootXpath(xpath)) { final List<FragmentExtract> fragmentExtracts = fragmentRepository.getTopLevelFragments(dataspaceEntity, anchorEntity); - final FragmentEntity fragmentEntity = FragmentEntityArranger.toFragmentEntityTree(anchorEntity, - fragmentExtracts); - return fragmentEntity; + fragmentEntity = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts) + .stream().findFirst().orElse(null); } else { final String normalizedXpath = getNormalizedXpath(xpath); - final FragmentEntity fragmentEntity; if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { fragmentEntity = fragmentRepository.getByDataspaceAndAnchorAndXpath(dataspaceEntity, anchorEntity, normalizedXpath); } else { - fragmentEntity = buildFragmentEntityFromFragmentExtracts(anchorEntity, normalizedXpath); - } - if (fragmentEntity == null) { - throw new DataNodeNotFoundException(dataspaceEntity.getName(), anchorEntity.getName(), xpath); + fragmentEntity = buildFragmentEntitiesFromFragmentExtracts(anchorEntity, normalizedXpath) + .stream().findFirst().orElse(null); } - return fragmentEntity; } + if (fragmentEntity == null) { + throw new DataNodeNotFoundException(dataspaceEntity.getName(), anchorEntity.getName(), xpath); + } + return fragmentEntity; + } - private FragmentEntity 
buildFragmentEntityFromFragmentExtracts(final AnchorEntity anchorEntity, - final String normalizedXpath) { - final FragmentEntity fragmentEntity; + private Collection<FragmentEntity> buildFragmentEntitiesFromFragmentExtracts(final AnchorEntity anchorEntity, + final String normalizedXpath) { final List<FragmentExtract> fragmentExtracts = fragmentRepository.findByAnchorIdAndParentXpath(anchorEntity.getId(), normalizedXpath); log.debug("Fetched {} fragment entities by anchor {} and cps path {}.", fragmentExtracts.size(), anchorEntity.getName(), normalizedXpath); - fragmentEntity = FragmentEntityArranger.toFragmentEntityTree(anchorEntity, fragmentExtracts); - return fragmentEntity; + return FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts); + } @Override @@ -273,32 +331,72 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService } catch (final PathParsingException e) { throw new CpsPathException(e.getMessage()); } - List<FragmentEntity> fragmentEntities = - fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery); + + Collection<FragmentEntity> fragmentEntities; + if (canUseRegexQuickFind(fetchDescendantsOption, cpsPathQuery)) { + return getDataNodesUsingRegexQuickFind(fetchDescendantsOption, anchorEntity, cpsPathQuery); + } + fragmentEntities = fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery); if (cpsPathQuery.hasAncestorAxis()) { - final Set<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); - fragmentEntities = ancestorXpaths.isEmpty() ? Collections.emptyList() - : fragmentRepository.findAllByAnchorAndXpathIn(anchorEntity, ancestorXpaths); + fragmentEntities = getAncestorFragmentEntities(anchorEntity.getId(), cpsPathQuery, fragmentEntities); } - return createDataNodesFromFragmentEntities(fetchDescendantsOption, anchorEntity, - fragmentEntities); + return createDataNodesFromProxiedFragmentEntities(fetchDescendantsOption, anchorEntity, fragmentEntities); } - private List<DataNode> createDataNodesFromFragmentEntities(final FetchDescendantsOption fetchDescendantsOption, - final AnchorEntity anchorEntity, - final List<FragmentEntity> fragmentEntities) { - final List<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size()); - for (final FragmentEntity proxiedFragmentEntity : fragmentEntities) { - final DataNode dataNode; + private static boolean canUseRegexQuickFind(final FetchDescendantsOption fetchDescendantsOption, + final CpsPathQuery cpsPathQuery) { + return fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + && !cpsPathQuery.hasLeafConditions() + && !cpsPathQuery.hasTextFunctionCondition(); + } + + private List<DataNode> getDataNodesUsingRegexQuickFind(final FetchDescendantsOption fetchDescendantsOption, + final AnchorEntity anchorEntity, + final CpsPathQuery cpsPathQuery) { + Collection<FragmentEntity> fragmentEntities; + final String xpathRegex = FragmentQueryBuilder.getXpathSqlRegex(cpsPathQuery, true); + final List<FragmentExtract> fragmentExtracts = + fragmentRepository.quickFindWithDescendants(anchorEntity.getId(), xpathRegex); + fragmentEntities = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts); + if (cpsPathQuery.hasAncestorAxis()) { + fragmentEntities = getAncestorFragmentEntities(anchorEntity.getId(), cpsPathQuery, fragmentEntities); + } + return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); + } + + private Collection<FragmentEntity> 
getAncestorFragmentEntities(final int anchorId, + final CpsPathQuery cpsPathQuery, + final Collection<FragmentEntity> fragmentEntities) { + final Collection<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); + return ancestorXpaths.isEmpty() ? Collections.emptyList() + : fragmentRepository.findByAnchorAndMultipleCpsPaths(anchorId, ancestorXpaths); + } + + private List<DataNode> createDataNodesFromProxiedFragmentEntities( + final FetchDescendantsOption fetchDescendantsOption, + final AnchorEntity anchorEntity, + final Collection<FragmentEntity> proxiedFragmentEntities) { + final List<DataNode> dataNodes = new ArrayList<>(proxiedFragmentEntities.size()); + for (final FragmentEntity proxiedFragmentEntity : proxiedFragmentEntities) { if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { - dataNode = toDataNode(proxiedFragmentEntity, fetchDescendantsOption); + dataNodes.add(toDataNode(proxiedFragmentEntity, fetchDescendantsOption)); } else { final String normalizedXpath = getNormalizedXpath(proxiedFragmentEntity.getXpath()); - final FragmentEntity unproxiedFragmentEntity = buildFragmentEntityFromFragmentExtracts(anchorEntity, - normalizedXpath); - dataNode = toDataNode(unproxiedFragmentEntity, fetchDescendantsOption); + final Collection<FragmentEntity> unproxiedFragmentEntities = + buildFragmentEntitiesFromFragmentExtracts(anchorEntity, normalizedXpath); + for (final FragmentEntity unproxiedFragmentEntity : unproxiedFragmentEntities) { + dataNodes.add(toDataNode(unproxiedFragmentEntity, fetchDescendantsOption)); + } } - dataNodes.add(dataNode); + } + return Collections.unmodifiableList(dataNodes); + } + + private List<DataNode> createDataNodesFromFragmentEntities(final FetchDescendantsOption fetchDescendantsOption, + final Collection<FragmentEntity> fragmentEntities) { + final List<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size()); + for (final FragmentEntity fragmentEntity : fragmentEntities) { + dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); } return Collections.unmodifiableList(dataNodes); } @@ -329,7 +427,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, timeoutInMilliseconds); } - private static Set<String> processAncestorXpath(final List<FragmentEntity> fragmentEntities, + private static Set<String> processAncestorXpath(final Collection<FragmentEntity> fragmentEntities, final CpsPathQuery cpsPathQuery) { final Set<String> ancestorXpath = new HashSet<>(); final Pattern pattern = @@ -347,7 +445,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService private DataNode toDataNode(final FragmentEntity fragmentEntity, final FetchDescendantsOption fetchDescendantsOption) { final List<DataNode> childDataNodes = getChildDataNodes(fragmentEntity, fetchDescendantsOption); - Map<String, Object> leaves = new HashMap<>(); + Map<String, Serializable> leaves = new HashMap<>(); if (fragmentEntity.getAttributes() != null) { leaves = jsonObjectMapper.convertJsonString(fragmentEntity.getAttributes(), Map.class); } @@ -357,6 +455,15 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService .withChildDataNodes(childDataNodes).build(); } + private Collection<DataNode> toDataNodes(final Collection<FragmentEntity> fragmentEntities, + final FetchDescendantsOption fetchDescendantsOption) { + final Collection<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size()); + for 
(final FragmentEntity fragmentEntity : fragmentEntities) { + dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); + } + return dataNodes; + } + private List<DataNode> getChildDataNodes(final FragmentEntity fragmentEntity, final FetchDescendantsOption fetchDescendantsOption) { if (fetchDescendantsOption.hasNext()) { @@ -369,9 +476,11 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService @Override public void updateDataLeaves(final String dataspaceName, final String anchorName, final String xpath, - final Map<String, Object> leaves) { + final Map<String, Serializable> updateLeaves) { final FragmentEntity fragmentEntity = getFragmentWithoutDescendantsByXpath(dataspaceName, anchorName, xpath); - fragmentEntity.setAttributes(jsonObjectMapper.asJsonString(leaves)); + final String currentLeavesAsString = fragmentEntity.getAttributes(); + final String mergedLeaves = mergeLeaves(updateLeaves, currentLeavesAsString); + fragmentEntity.setAttributes(mergedLeaves); fragmentRepository.save(fragmentEntity); } @@ -512,13 +621,10 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService if (isRootContainerNodeXpath(targetXpath)) { parentNodeXpath = targetXpath; } else { - parentNodeXpath = targetXpath.substring(0, targetXpath.lastIndexOf('/')); + parentNodeXpath = CpsPathUtil.getNormalizedParentXpath(targetXpath); } parentFragmentEntity = getFragmentWithoutDescendantsByXpath(dataspaceName, anchorName, parentNodeXpath); - final String lastXpathElement = targetXpath.substring(targetXpath.lastIndexOf('/')); - final boolean isListElement = REG_EX_PATTERN_FOR_LIST_ELEMENT_KEY_PREDICATE - .matcher(lastXpathElement).find(); - if (isListElement) { + if (CpsPathUtil.isPathToListElement(targetXpath)) { targetDeleted = deleteDataNode(parentFragmentEntity, targetXpath); } else { targetDeleted = deleteAllListElements(parentFragmentEntity, targetXpath); @@ -619,4 +725,14 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService private static boolean isRootXpath(final String xpath) { return "/".equals(xpath) || "".equals(xpath); } + + private String mergeLeaves(final Map<String, Serializable> updateLeaves, final String currentLeavesAsString) { + final Map<String, Serializable> currentLeavesAsMap = currentLeavesAsString.isEmpty() + ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class); + currentLeavesAsMap.putAll(updateLeaves); + if (currentLeavesAsMap.isEmpty()) { + return ""; + } + return jsonObjectMapper.asJsonString(currentLeavesAsMap); + } } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java index 8008e0324a..c9f9a78ef5 100755 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java @@ -3,6 +3,7 @@ * Copyright (C) 2020-2022 Nordix Foundation * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -55,6 +56,7 @@ import org.onap.cps.spi.exceptions.DuplicatedYangResourceException; import org.onap.cps.spi.exceptions.ModelValidationException; import org.onap.cps.spi.model.ModuleDefinition; import org.onap.cps.spi.model.ModuleReference; +import org.onap.cps.spi.model.SchemaSet; import org.onap.cps.spi.repository.DataspaceRepository; import org.onap.cps.spi.repository.ModuleReferenceRepository; import org.onap.cps.spi.repository.SchemaSetRepository; @@ -155,6 +157,14 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ } @Override + public Collection<SchemaSet> getSchemaSetsByDataspaceName(final String dataspaceName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final List<SchemaSetEntity> schemaSetEntities = schemaSetRepository.getByDataspace(dataspaceEntity); + return schemaSetEntities.stream() + .map(CpsModulePersistenceServiceImpl::toSchemaSet).collect(Collectors.toList()); + } + + @Override @Transactional // A retry is made to store the schema set if it fails because of duplicated yang resource exception that // can occur in case of specific concurrent requests. @@ -327,12 +337,14 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ */ private String getNameForChecksum( final String checksum, final Collection<YangResourceEntity> yangResourceEntities) { - return - yangResourceEntities.stream() + final Optional<String> optionalFileName = yangResourceEntities.stream() .filter(entity -> StringUtils.equals(checksum, (entity.getChecksum()))) .findFirst() - .map(YangResourceEntity::getFileName) - .orElse(null); + .map(YangResourceEntity::getFileName); + if (optionalFileName.isPresent()) { + return optionalFileName.get(); + } + return null; } /** @@ -364,4 +376,9 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ yangResourceEntity.getRevision(), yangResourceEntity.getContent()); } + + private static SchemaSet toSchemaSet(final SchemaSetEntity schemaSetEntity) { + return SchemaSet.builder().name(schemaSetEntity.getName()) + .dataspaceName(schemaSetEntity.getDataspace().getName()).build(); + } } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java new file mode 100644 index 0000000000..f107928ca7 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java @@ -0,0 +1,139 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + +import java.util.HashMap; +import java.util.Map; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.Query; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.cpspath.parser.CpsPathPrefixType; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.spi.entities.FragmentEntity; +import org.onap.cps.utils.JsonObjectMapper; +import org.springframework.stereotype.Component; + +@RequiredArgsConstructor +@Slf4j +@Component +public class FragmentQueryBuilder { + private static final String REGEX_ABSOLUTE_PATH_PREFIX = ".*\\/"; + private static final String REGEX_OPTIONAL_LIST_INDEX_POSTFIX = "(\\[@(?!.*\\[).*?])?"; + private static final String REGEX_DESCENDANT_PATH_POSTFIX = "(\\/.*)?"; + private static final String REGEX_END_OF_INPUT = "$"; + + @PersistenceContext + private EntityManager entityManager; + + private final JsonObjectMapper jsonObjectMapper; + + /** + * Create a sql query to retrieve by anchor(id) and cps path. + * + * @param anchorId the id of the anchor + * @param cpsPathQuery the cps path query to be transformed into a sql query + * @return a executable query object + */ + public Query getQueryForAnchorAndCpsPath(final int anchorId, final CpsPathQuery cpsPathQuery) { + final StringBuilder sqlStringBuilder = new StringBuilder("SELECT * FROM FRAGMENT WHERE anchor_id = :anchorId"); + final Map<String, Object> queryParameters = new HashMap<>(); + queryParameters.put("anchorId", anchorId); + sqlStringBuilder.append(" AND xpath ~ :xpathRegex"); + final String xpathRegex = getXpathSqlRegex(cpsPathQuery, false); + queryParameters.put("xpathRegex", xpathRegex); + if (cpsPathQuery.hasLeafConditions()) { + sqlStringBuilder.append(" AND attributes @> :leafDataAsJson\\:\\:jsonb"); + queryParameters.put("leafDataAsJson", jsonObjectMapper.asJsonString( + cpsPathQuery.getLeavesData())); + } + + addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); + final Query query = entityManager.createNativeQuery(sqlStringBuilder.toString(), FragmentEntity.class); + setQueryParameters(query, queryParameters); + return query; + } + + /** + * Create a regular expression (string) for xpath based on the given cps path query. 
+ * + * @param cpsPathQuery the cps path query to determine the required regular expression + * @param includeDescendants include descendants yes or no + * @return a string representing the required regular expression + */ + public static String getXpathSqlRegex(final CpsPathQuery cpsPathQuery, final boolean includeDescendants) { + final StringBuilder xpathRegexBuilder = new StringBuilder(); + if (CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { + xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getXpathPrefix())); + } else { + xpathRegexBuilder.append(REGEX_ABSOLUTE_PATH_PREFIX); + xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getDescendantName())); + } + xpathRegexBuilder.append(REGEX_OPTIONAL_LIST_INDEX_POSTFIX); + if (includeDescendants) { + xpathRegexBuilder.append(REGEX_DESCENDANT_PATH_POSTFIX); + } + xpathRegexBuilder.append(REGEX_END_OF_INPUT); + return xpathRegexBuilder.toString(); + } + + private static String escapeXpath(final String xpath) { + // See https://jira.onap.org/browse/CPS-500 for limitations of this basic escape mechanism + return xpath.replace("[@", "\\[@"); + } + + private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) { + try { + return Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue()); + } catch (final NumberFormatException e) { + return null; + } + } + + private static void addTextFunctionCondition(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map<String, Object> queryParameters) { + if (cpsPathQuery.hasTextFunctionCondition()) { + sqlStringBuilder.append(" AND ("); + sqlStringBuilder.append("attributes @> jsonb_build_object(:textLeafName, :textValue)"); + sqlStringBuilder + .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValue))"); + queryParameters.put("textLeafName", cpsPathQuery.getTextFunctionConditionLeafName()); + queryParameters.put("textValue", cpsPathQuery.getTextFunctionConditionValue()); + final Integer textValueAsInt = getTextValueAsInt(cpsPathQuery); + if (textValueAsInt != null) { + sqlStringBuilder.append(" OR attributes @> jsonb_build_object(:textLeafName, :textValueAsInt)"); + sqlStringBuilder + .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValueAsInt))"); + queryParameters.put("textValueAsInt", textValueAsInt); + } + sqlStringBuilder.append(")"); + } + } + + private static void setQueryParameters(final Query query, final Map<String, Object> queryParameters) { + for (final Map.Entry<String, Object> queryParameter : queryParameters.entrySet()) { + query.setParameter(queryParameter.getKey(), queryParameter.getValue()); + } + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java index 2c25a61a7e..4b42b2da8d 100755 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java @@ -39,7 +39,8 @@ import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository;
@Repository
-public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery {
+public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery,
+ FragmentRepositoryMultiPathQuery {
Optional<FragmentEntity> findByDataspaceAndAnchorAndXpath(@NonNull DataspaceEntity dataspaceEntity,
@NonNull AnchorEntity anchorEntity,
@@ -80,9 +81,6 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
return fragmentExtracts;
}
- List<FragmentEntity> findAllByAnchorAndXpathIn(@NonNull AnchorEntity anchorEntity,
- @NonNull Collection<String> xpath);
-
@Modifying
@Query("DELETE FROM FragmentEntity fe WHERE fe.anchor IN (:anchors)")
void deleteByAnchorIn(@NotNull @Param("anchors") Collection<AnchorEntity> anchorEntities);
@@ -94,4 +92,12 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
nativeQuery = true)
List<FragmentExtract> findByAnchorIdAndParentXpath(@Param("anchorId") int anchorId,
@Param("parentXpath") String parentXpath);
+
+ @Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"
+ + " CAST(attributes AS TEXT) AS attributes"
+ + " FROM FRAGMENT WHERE anchor_id = :anchorId"
+ + " AND xpath ~ :xpathRegex",
+ nativeQuery = true)
+ List<FragmentExtract> quickFindWithDescendants(@Param("anchorId") int anchorId,
+ @Param("xpathRegex") String xpathRegex);
}
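The quickFindWithDescendants query above matches xpaths with a PostgreSQL regular expression (xpath ~ :xpathRegex). Below is a standalone sketch of the kind of pattern it is handed, mirroring the constants of FragmentQueryBuilder.getXpathSqlRegex introduced later in this change set; the descendant name 'books' is an arbitrary example value:

    // Illustration only, not part of the change set. Mirrors getXpathSqlRegex(cpsPathQuery, true)
    // for a descendant cps path such as '//books'.
    final String xpathRegex = ".*\\/"                 // REGEX_ABSOLUTE_PATH_PREFIX
        + "books".replace("[@", "\\[@")               // escaped descendant name (see CPS-500 for limitations)
        + "(\\[@(?!.*\\[).*?])?"                      // REGEX_OPTIONAL_LIST_INDEX_POSTFIX
        + "(\\/.*)?"                                  // REGEX_DESCENDANT_PATH_POSTFIX (descendants included)
        + "$";                                        // REGEX_END_OF_INPUT
    // e.g. matches "/shops/shop[@id='1']/books[@title='Dune']" and any of its descendants when passed to:
    // fragmentRepository.quickFindWithDescendants(anchorId, xpathRegex)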
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java index 1d61416cfd..6e8f05f017 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java @@ -20,103 +20,32 @@ package org.onap.cps.spi.repository; -import java.util.HashMap; import java.util.List; -import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.Query; import javax.transaction.Transactional; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.cpspath.parser.CpsPathPrefixType; import org.onap.cps.cpspath.parser.CpsPathQuery; import org.onap.cps.spi.entities.FragmentEntity; -import org.onap.cps.utils.JsonObjectMapper; @RequiredArgsConstructor @Slf4j public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCpsPathQuery { - public static final String REGEX_ABSOLUTE_PATH_PREFIX = ".*\\/"; - public static final String REGEX_OPTIONAL_LIST_INDEX_POSTFIX = "(\\[@(?!.*\\[).*?])?$"; - @PersistenceContext private EntityManager entityManager; - private final JsonObjectMapper jsonObjectMapper; + + private final FragmentQueryBuilder fragmentQueryBuilder; @Override @Transactional public List<FragmentEntity> findByAnchorAndCpsPath(final int anchorId, final CpsPathQuery cpsPathQuery) { - final StringBuilder sqlStringBuilder = new StringBuilder("SELECT * FROM FRAGMENT WHERE anchor_id = :anchorId"); - final Map<String, Object> queryParameters = new HashMap<>(); - queryParameters.put("anchorId", anchorId); - sqlStringBuilder.append(" AND xpath ~ :xpathRegex"); - final String xpathRegex = getXpathSqlRegex(cpsPathQuery); - queryParameters.put("xpathRegex", xpathRegex); - if (cpsPathQuery.hasLeafConditions()) { - sqlStringBuilder.append(" AND attributes @> :leafDataAsJson\\:\\:jsonb"); - queryParameters.put("leafDataAsJson", jsonObjectMapper.asJsonString( - cpsPathQuery.getLeavesData())); - } - - addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); - final Query query = entityManager.createNativeQuery(sqlStringBuilder.toString(), FragmentEntity.class); - setQueryParameters(query, queryParameters); + final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorId, cpsPathQuery); final List<FragmentEntity> fragmentEntities = query.getResultList(); log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); return fragmentEntities; } - private static String getXpathSqlRegex(final CpsPathQuery cpsPathQuery) { - final StringBuilder xpathRegexBuilder = new StringBuilder(); - if (CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { - xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getXpathPrefix())); - } else { - xpathRegexBuilder.append(REGEX_ABSOLUTE_PATH_PREFIX); - xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getDescendantName())); - } - xpathRegexBuilder.append(REGEX_OPTIONAL_LIST_INDEX_POSTFIX); - return xpathRegexBuilder.toString(); - } - - private static String escapeXpath(final String xpath) { - // See https://jira.onap.org/browse/CPS-500 for limitations of this basic escape mechanism - return xpath.replace("[@", "\\[@"); - } - - private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) { - try { - return 
Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue()); - } catch (final NumberFormatException e) { - return null; - } - } - - private static void addTextFunctionCondition(final CpsPathQuery cpsPathQuery, final StringBuilder sqlStringBuilder, - final Map<String, Object> queryParameters) { - if (cpsPathQuery.hasTextFunctionCondition()) { - sqlStringBuilder.append(" AND ("); - sqlStringBuilder.append("attributes @> jsonb_build_object(:textLeafName, :textValue)"); - sqlStringBuilder - .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValue))"); - queryParameters.put("textLeafName", cpsPathQuery.getTextFunctionConditionLeafName()); - queryParameters.put("textValue", cpsPathQuery.getTextFunctionConditionValue()); - final Integer textValueAsInt = getTextValueAsInt(cpsPathQuery); - if (textValueAsInt != null) { - sqlStringBuilder.append(" OR attributes @> jsonb_build_object(:textLeafName, :textValueAsInt)"); - sqlStringBuilder - .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValueAsInt))"); - queryParameters.put("textValueAsInt", textValueAsInt); - } - sqlStringBuilder.append(")"); - } - } - - private static void setQueryParameters(final Query query, final Map<String, Object> queryParameters) { - for (final Map.Entry<String, Object> queryParameter : queryParameters.entrySet()) { - query.setParameter(queryParameter.getKey(), queryParameter.getValue()); - } - } - } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java new file mode 100644 index 0000000000..9c34a459e9 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java @@ -0,0 +1,31 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + +import java.util.Collection; +import java.util.List; +import org.onap.cps.spi.entities.FragmentEntity; + +public interface FragmentRepositoryMultiPathQuery { + + List<FragmentEntity> findByAnchorAndMultipleCpsPaths(Integer anchorId, Collection<String> cpsPathQuery); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java new file mode 100644 index 0000000000..8c357bbb31 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java @@ -0,0 +1,74 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.transaction.Transactional; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.spi.entities.FragmentEntity; + + +@Slf4j +@AllArgsConstructor +public class FragmentRepositoryMultiPathQueryImpl implements FragmentRepositoryMultiPathQuery { + + @PersistenceContext + private EntityManager entityManager; + + private TempTableCreator tempTableCreator; + + @Override + @Transactional + public List<FragmentEntity> findByAnchorAndMultipleCpsPaths(final Integer anchorId, + final Collection<String> cpsPathQueryList) { + if (cpsPathQueryList.isEmpty()) { + return Collections.emptyList(); + } + final Collection<List<String>> sqlData = new HashSet<>(cpsPathQueryList.size()); + for (final String query : cpsPathQueryList) { + final List<String> row = new ArrayList<>(1); + row.add(query); + sqlData.add(row); + } + + final String tempTableName = tempTableCreator.createTemporaryTable( + "xpathTemporaryTable", sqlData, "xpath"); + return selectMatchingFragments(anchorId, tempTableName); + } + + private List<FragmentEntity> selectMatchingFragments(final Integer anchorId, final String tempTableName) { + final String sql = String.format( + "SELECT * FROM FRAGMENT WHERE anchor_id = %d AND xpath IN (select xpath FROM %s);", + anchorId, tempTableName); + final List<FragmentEntity> fragmentEntities = entityManager.createNativeQuery(sql, FragmentEntity.class) + .getResultList(); + log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); + return 
fragmentEntities; + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java index 5e4de7fec4..00e53aa00f 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java @@ -28,6 +28,5 @@ import org.onap.cps.spi.model.ModuleReference; */ public interface ModuleReferenceQuery { - Collection<ModuleReference> identifyNewModuleReferences( - final Collection<ModuleReference> moduleReferencesToCheck); + Collection<ModuleReference> identifyNewModuleReferences(final Collection<ModuleReference> moduleReferencesToCheck); } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java index f70e218373..ef701bc7dc 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java @@ -20,16 +20,10 @@ package org.onap.cps.spi.repository; -import java.util.Collection; import org.onap.cps.spi.entities.YangResourceEntity; -import org.onap.cps.spi.model.ModuleReference; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; @Repository -public interface ModuleReferenceRepository extends JpaRepository<YangResourceEntity, Long>, ModuleReferenceQuery { +public interface ModuleReferenceRepository extends JpaRepository<YangResourceEntity, Long>, ModuleReferenceQuery {} - Collection<ModuleReference> identifyNewModuleReferences( - final Collection<ModuleReference> moduleReferencesToCheck); - -}
\ No newline at end of file diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java index 681bbcddec..48982d51ff 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java @@ -23,8 +23,8 @@ package org.onap.cps.spi.repository; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; -import java.util.UUID; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import lombok.AllArgsConstructor; @@ -41,6 +41,8 @@ public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery { @PersistenceContext private EntityManager entityManager; + private TempTableCreator tempTableCreator; + @Override @SneakyThrows public Collection<ModuleReference> identifyNewModuleReferences( @@ -50,42 +52,18 @@ public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery { return Collections.emptyList(); } - final String tempTableName = "moduleReferencesToCheckTemp" - + UUID.randomUUID().toString().replace("-", ""); - - createTemporaryTable(tempTableName); - insertDataIntoTable(tempTableName, moduleReferencesToCheck); - - return identifyNewModuleReferencesForCmHandle(tempTableName); - } - - private void createTemporaryTable(final String tempTableName) { - final StringBuilder sqlStringBuilder = new StringBuilder("CREATE TEMPORARY TABLE " + tempTableName + "("); - sqlStringBuilder.append(" id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,"); - sqlStringBuilder.append(" module_name varchar NOT NULL,"); - sqlStringBuilder.append(" revision varchar NOT NULL"); - sqlStringBuilder.append(");"); - - entityManager.createNativeQuery(sqlStringBuilder.toString()).executeUpdate(); - } - - private void insertDataIntoTable(final String tempTableName, final Collection<ModuleReference> moduleReferences) { - final StringBuilder sqlStringBuilder = new StringBuilder("INSERT INTO " + tempTableName); - sqlStringBuilder.append(" (module_name, revision) "); - sqlStringBuilder.append(" VALUES "); - - for (final ModuleReference moduleReference : moduleReferences) { - sqlStringBuilder.append("('"); - sqlStringBuilder.append(moduleReference.getModuleName()); - sqlStringBuilder.append("', '"); - sqlStringBuilder.append(moduleReference.getRevision()); - sqlStringBuilder.append("'),"); + final Collection<List<String>> sqlData = new HashSet<>(moduleReferencesToCheck.size()); + for (final ModuleReference moduleReference : moduleReferencesToCheck) { + final List<String> row = new ArrayList<>(2); + row.add(moduleReference.getModuleName()); + row.add(moduleReference.getRevision()); + sqlData.add(row); } - // replace last ',' with ';' - sqlStringBuilder.replace(sqlStringBuilder.length() - 1, sqlStringBuilder.length(), ";"); + final String tempTableName = tempTableCreator.createTemporaryTable( + "moduleReferencesToCheckTemp", sqlData, "module_name", "revision"); - entityManager.createNativeQuery(sqlStringBuilder.toString()).executeUpdate(); + return identifyNewModuleReferencesForCmHandle(tempTableName); } private Collection<ModuleReference> identifyNewModuleReferencesForCmHandle(final String tempTableName) { diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java 
index a15ce622c2..8cecb0a8e3 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java @@ -1,6 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2020 Pantheon.tech + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,7 +20,10 @@ package org.onap.cps.spi.repository; +import java.util.Collection; +import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import javax.validation.constraints.NotNull; import org.onap.cps.spi.entities.DataspaceEntity; import org.onap.cps.spi.entities.SchemaSetEntity; @@ -33,6 +37,13 @@ public interface SchemaSetRepository extends JpaRepository<SchemaSetEntity, Inte Optional<SchemaSetEntity> findByDataspaceAndName(@NotNull DataspaceEntity dataspaceEntity, @NotNull String schemaSetName); + /** + * Gets schema sets by dataspace. + * @param dataspaceEntity dataspace entity + * @return list of schema set entity + */ + Collection<SchemaSetEntity> findByDataspace(@NotNull DataspaceEntity dataspaceEntity); + Integer countByDataspace(@NotNull DataspaceEntity dataspaceEntity); /** @@ -48,4 +59,15 @@ public interface SchemaSetRepository extends JpaRepository<SchemaSetEntity, Inte return findByDataspaceAndName(dataspaceEntity, schemaSetName) .orElseThrow(() -> new SchemaSetNotFoundException(dataspaceEntity.getName(), schemaSetName)); } + + /** + * Gets all schema sets for a given dataspace. + * + * @param dataspaceEntity dataspace entity + * @return list of schema set entity + * @throws SchemaSetNotFoundException if SchemaSet not found + */ + default List<SchemaSetEntity> getByDataspace(@NotNull final DataspaceEntity dataspaceEntity) { + return findByDataspace(dataspaceEntity).stream().collect(Collectors.toList()); + } } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java new file mode 100644 index 0000000000..d713746e45 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java @@ -0,0 +1,104 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Slf4j +@Transactional +@AllArgsConstructor +@Component +public class TempTableCreator { + + @PersistenceContext + private EntityManager entityManager; + + /** + * Create a uniquely named temporary table. + * + * @param prefix prefix for the table name (so you can recognize it) + * @param sqlData data to insert (strings only) the inner List present a row of data + * @param columnNames column names (in same order as data in rows in sqlData) + * @return a unique temporary table name with given prefix + */ + public String createTemporaryTable(final String prefix, + final Collection<List<String>> sqlData, + final String... columnNames) { + final String tempTableName = prefix + UUID.randomUUID().toString().replace("-", ""); + final StringBuilder sqlStringBuilder = new StringBuilder("CREATE TEMPORARY TABLE "); + sqlStringBuilder.append(tempTableName); + defineColumns(sqlStringBuilder, columnNames); + insertData(sqlStringBuilder, tempTableName, columnNames, sqlData); + entityManager.createNativeQuery(sqlStringBuilder.toString()).executeUpdate(); + return tempTableName; + } + + private static void defineColumns(final StringBuilder sqlStringBuilder, final String[] columnNames) { + sqlStringBuilder.append('('); + final Iterator<String> it = Arrays.stream(columnNames).iterator(); + while (it.hasNext()) { + final String columnName = it.next(); + sqlStringBuilder.append(" "); + sqlStringBuilder.append(columnName); + sqlStringBuilder.append(" varchar NOT NULL"); + if (it.hasNext()) { + sqlStringBuilder.append(","); + } + } + sqlStringBuilder.append(");"); + } + + private static void insertData(final StringBuilder sqlStringBuilder, + final String tempTableName, + final String[] columnNames, + final Collection<List<String>> sqlData) { + final Collection<String> sqlInserts = new HashSet<>(sqlData.size()); + for (final Collection<String> rowValues : sqlData) { + final Collection<String> escapedValues = + rowValues.stream().map(it -> escapeSingleQuotesByDoublingThem(it)).collect(Collectors.toList()); + sqlInserts.add("('" + String.join("','", escapedValues) + "')"); + } + sqlStringBuilder.append("INSERT INTO "); + sqlStringBuilder.append(tempTableName); + sqlStringBuilder.append(" ("); + sqlStringBuilder.append(String.join(",", columnNames)); + sqlStringBuilder.append(") VALUES "); + sqlStringBuilder.append(String.join(",", sqlInserts)); + sqlStringBuilder.append(";"); + } + + private static String escapeSingleQuotesByDoublingThem(final String value) { + return value.replace("'", "''"); + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy index 56e388335e..b6d2c5d65e 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy +++ 
b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy @@ -26,6 +26,8 @@ import org.onap.cps.spi.exceptions.CpsPathException import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql +import java.util.stream.Collectors + import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS @@ -147,27 +149,30 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase { def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, INCLUDE_ALL_DESCENDANTS) then: 'the xpaths of the retrieved data nodes are as expected' result.size() == expectedXPaths.size() - for (int i = 0; i < result.size(); i++) { - assert result[i].getXpath() == expectedXPaths[i] - assert result[i].childDataNodes.size() == expectedNumberOfChildren[i] + if (result.size() > 0) { + def resultXpaths = result.stream().map(it -> it.xpath).collect(Collectors.toSet()) + resultXpaths.containsAll(expectedXPaths) + result.each { + assert it.childDataNodes.size() == expectedNumberOfChildren + } } where: 'the following data is used' scenario | cpsPath || expectedXPaths || expectedNumberOfChildren - 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='1']", "/shops/shop[@id='1']/categories[@code='2']"] || [1, 1] - 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || [1] - 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || [5] - 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || [1] - 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || [3] - 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || [] - 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || [] + 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='2']", "/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || 5 + 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || 3 + 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || 1 + 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || 3 + 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"] || 3 + 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || null + 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || null } def 'Cps Path query with syntax error throws a CPS Path Exception.'() { when: 'trying to execute a query with a 
syntax (parsing) error' objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS) - then: 'exception is thrown' + then: 'a cps path exception is thrown' thrown(CpsPathException) } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy index fbf414d2ad..6252fff56c 100755 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy @@ -1,8 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2022 Nordix Foundation + * Copyright (C) 2021-2023 Nordix Foundation * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada. + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,7 +27,6 @@ import com.google.common.collect.ImmutableSet import org.onap.cps.cpspath.parser.PathParsingException import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.entities.FragmentEntity -import org.onap.cps.spi.exceptions.AlreadyDefinedException import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch import org.onap.cps.spi.exceptions.AnchorNotFoundException import org.onap.cps.spi.exceptions.CpsAdminException @@ -38,6 +38,7 @@ import org.onap.cps.spi.model.DataNodeBuilder import org.onap.cps.utils.JsonObjectMapper import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql + import javax.validation.ConstraintViolationException import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS @@ -48,25 +49,29 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { @Autowired CpsDataPersistenceService objectUnderTest - static final JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - static final DataNodeBuilder dataNodeBuilder = new DataNodeBuilder() + static JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + static DataNodeBuilder dataNodeBuilder = new DataNodeBuilder() static final String SET_DATA = '/data/fragment.sql' - static final int DATASPACE_1001_ID = 1001L - static final int ANCHOR_3003_ID = 3003L - static final long ID_DATA_NODE_WITH_DESCENDANTS = 4001 - static final String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1' - static final String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207' - static final long DATA_NODE_202_FRAGMENT_ID = 4202L - static final long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L - static final long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L - static final long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L - static final long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L - static final long PARENT_3_FRAGMENT_ID = 4003L - - static final DataNode newDataNode = new DataNodeBuilder().build() - static DataNode existingDataNode - static DataNode existingChildDataNode + static int DATASPACE_1001_ID = 1001L + static int ANCHOR_3003_ID = 3003L + static long ID_DATA_NODE_WITH_DESCENDANTS = 4001 + static String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1' + static 
String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207' + static long DATA_NODE_202_FRAGMENT_ID = 4202L + static long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L + static long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L + static long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L + static long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L + static long PARENT_3_FRAGMENT_ID = 4003L + + static Collection<DataNode> newDataNodes = [new DataNodeBuilder().build()] + static Collection<DataNode> existingDataNodes = [createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)] + static Collection<DataNode> existingChildDataNodes = [createDataNodeTree('/parent-1/child-1')] + + def static deleteTestParentXPath = '/parent-200' + def static deleteTestChildXpath = "${deleteTestParentXPath}/child-with-slash[@key='a/b']" + def static deleteTestGrandChildXPath = "${deleteTestChildXpath}/grandChild" def expectedLeavesByXpathMap = [ '/parent-207' : ['parent-leaf': 'parent-leaf value'], @@ -75,11 +80,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { '/parent-207/child-002/grand-child': ['grand-child-leaf': 'grand-child-leaf value'] ] - static { - existingDataNode = createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS) - existingChildDataNode = createDataNodeTree('/parent-1/child-1') - } - @Sql([CLEAR_DATA, SET_DATA]) def 'Get existing datanode with descendants.'() { when: 'the node is retrieved by its xpath' @@ -93,13 +93,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Storing and Retrieving a new DataNode with descendants.'() { + def 'Storing and Retrieving a new DataNodes with descendants.'() { when: 'a fragment with descendants is stored' def parentXpath = '/parent-new' def childXpath = '/parent-new/child-new' def grandChildXpath = '/parent-new/child-new/grandchild-new' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, - createDataNodeTree(parentXpath, childXpath, grandChildXpath)) + def dataNodes = [createDataNodeTree(parentXpath, childXpath, grandChildXpath)] + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, dataNodes) then: 'it can be retrieved by its xpath' def dataNode = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, INCLUDE_ALL_DESCENDANTS) assert dataNode.xpath == parentXpath @@ -117,9 +117,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { def 'Store data node for multiple anchors using the same schema.'() { def xpath = '/parent-new' given: 'a fragment is stored for an anchor' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, createDataNodeTree(xpath)) + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, [createDataNodeTree(xpath)]) when: 'another fragment is stored for an other anchor, using the same schema set' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME3, createDataNodeTree(xpath)) + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME3, [createDataNodeTree(xpath)]) then: 'both fragments can be retrieved by their xpath' def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath) fragment1.anchor.name == ANCHOR_NAME1 @@ -130,45 +130,48 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Store datanode error scenario: #scenario.'() { + def 'Store datanodes error scenario: #scenario.'() { when: 'attempt to store a data node with #scenario' - objectUnderTest.storeDataNode(dataspaceName, anchorName, 
dataNode) + objectUnderTest.storeDataNodes(dataspaceName, anchorName, dataNodes) then: 'a #expectedException is thrown' thrown(expectedException) where: 'the following data is used' - scenario | dataspaceName | anchorName | dataNode || expectedException - 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNode || DataspaceNotFoundException - 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNode || AnchorNotFoundException - 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNode || ConstraintViolationException - 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNode || AlreadyDefinedException + scenario | dataspaceName | anchorName | dataNodes || expectedException + 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNodes || DataspaceNotFoundException + 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNodes || AnchorNotFoundException + 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNodes || ConstraintViolationException + 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNodes || AlreadyDefinedExceptionBatch } @Sql([CLEAR_DATA, SET_DATA]) - def 'Add a child to a Fragment that already has a child.'() { - given: ' a new child node' - def newChild = createDataNodeTree('xpath for new child') + def 'Add children to a Fragment that already has a child.'() { + given: 'collection of new child data nodes' + def newChild1 = createDataNodeTree('/parent-1/child-2') + def newChild2 = createDataNodeTree('/parent-1/child-3') + def newChildrenCollection = [newChild1, newChild2] when: 'the child is added to an existing parent with 1 child' - objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChild) - then: 'the parent is now has to 2 children' + objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChildrenCollection) + then: 'the parent is now has to 3 children' def expectedExistingChildPath = '/parent-1/child-1' def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow() - parentFragment.childFragments.size() == 2 + parentFragment.childFragments.size() == 3 and: 'it still has the old child' parentFragment.childFragments.find({ it.xpath == expectedExistingChildPath }) - and: 'it has the new child' - parentFragment.childFragments.find({ it.xpath == newChild.xpath }) + and: 'it has the new children' + parentFragment.childFragments.find({ it.xpath == newChildrenCollection[0].xpath }) + parentFragment.childFragments.find({ it.xpath == newChildrenCollection[1].xpath }) } @Sql([CLEAR_DATA, SET_DATA]) def 'Add child error scenario: #scenario.'() { when: 'attempt to add a child data node with #scenario' - objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNode) + objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNodes) then: 'a #expectedException is thrown' thrown(expectedException) where: 'the following data is used' - scenario | parentXpath | dataNode || expectedException - 'parent does not exist' | '/unknown' | newDataNode || DataNodeNotFoundException - 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException + scenario | parentXpath | dataNodes || expectedException + 'parent does not exist' | '/unknown' | newDataNodes || DataNodeNotFoundException + 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNodes || 
AlreadyDefinedExceptionBatch } @Sql([CLEAR_DATA, SET_DATA]) @@ -288,7 +291,41 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO-XPATH' || DataNodeNotFoundException + 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException + } + + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes by xpath.'() { + when: 'fetch #scenario.' + def results = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, inputXpaths, OMIT_DESCENDANTS) + then: 'the expected number of data nodes are returned' + assert results.size() == expectedResultSize + where: 'following parameters were used' + scenario | inputXpaths || expectedResultSize + '1 node' | ["/parent-200"] || 1 + '2 unique nodes' | ["/parent-200", "/parent-201"] || 2 + '3 unique nodes' | ["/parent-200", "/parent-201", "/parent-202"] || 3 + '1 unique node with duplicate xpath' | ["/parent-200", "/parent-200"] || 1 + '2 unique nodes with duplicate xpath' | ["/parent-200", "/parent-202", "/parent-200"] || 2 + 'list element with key (single quote)' | ["/parent-201/child-204[@key='A']"] || 1 + 'list element with key (double quote)' | ['/parent-201/child-204[@key="A"]'] || 1 + 'non-existing xpath' | ["/NO-XPATH"] || 0 + 'existing and non-existing xpaths' | ["/parent-200", "/NO-XPATH", "/parent-201"] || 2 + 'invalid xpath' | ["INVALID XPATH"] || 0 + 'valid and invalid xpaths' | ["/parent-200", "INVALID XPATH", "/parent-201"] || 2 + } + + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes error scenario: #scenario.'() { + when: 'attempt to get data nodes with #scenario' + objectUnderTest.getDataNodes(dataspaceName, anchorName, ['/not-relevant'], OMIT_DESCENDANTS) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | anchorName || expectedException + 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' || DataspaceNotFoundException + 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' || AnchorNotFoundException } @Sql([CLEAR_DATA, SET_DATA]) @@ -318,7 +355,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException } @Sql([CLEAR_DATA, SET_DATA]) @@ -412,7 +449,8 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' 
| '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException + 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException } @Sql([CLEAR_DATA, SET_DATA]) @@ -525,6 +563,25 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) + def 'Delete data nodes with "/"-token in list key value: #scenario. (CPS-1409)'() { + given: 'a data nodes with list-element child with "/" in index value (and grandchild)' + def grandChild = new DataNodeBuilder().withXpath(deleteTestGrandChildXPath).build() + def child = new DataNodeBuilder().withXpath(deleteTestChildXpath).withChildDataNodes([grandChild]).build() + objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, child) + and: 'number of children before delete is stored' + def numberOfChildrenBeforeDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size() + when: 'target node is deleted' + objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTarget) + then: 'one child has been deleted' + def numberOfChildrenAfterDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size() + assert numberOfChildrenAfterDelete == numberOfChildrenBeforeDelete - 1 + where: + scenario | deleteTarget | pathToParentOfDeletedNode + 'list element with /' | deleteTestChildXpath | deleteTestParentXPath + 'child of list element' | deleteTestGrandChildXPath | deleteTestChildXpath + } + + @Sql([CLEAR_DATA, SET_DATA]) def 'Delete list error scenario: #scenario.'() { when: 'attempting to delete scenario: #scenario.' objectUnderTest.deleteListDataNode(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths) @@ -541,7 +598,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Confirm deletion of #scenario.'() { + def 'Delete data node by xpath #scenario.'() { given: 'a valid data node' def dataNode and: 'data nodes are deleted' @@ -566,7 +623,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Delete data node with #scenario.'() { + def 'Delete data node error scenario: #scenario.'() { when: 'data node is deleted' objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath) then: 'a #expectedException is thrown' diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy index e69cbee471..87e59c60dc 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy @@ -1,7 +1,8 @@ /* * ============LICENSE_START======================================================= * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2021-2022 Nordix Foundation + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * Modifications Copyright (C) 2022 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +35,7 @@ import org.onap.cps.spi.repository.DataspaceRepository import org.onap.cps.spi.repository.FragmentRepository import org.onap.cps.spi.utils.SessionManager import org.onap.cps.utils.JsonObjectMapper +import org.springframework.dao.DataIntegrityViolationException import spock.lang.Specification class CpsDataPersistenceServiceSpec extends Specification { @@ -44,7 +46,28 @@ class CpsDataPersistenceServiceSpec extends Specification { def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) def mockSessionManager = Mock(SessionManager) - def objectUnderTest = new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager) + def objectUnderTest = Spy(new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager)) + + def 'Storing data nodes individually when batch operation fails'(){ + given: 'two data nodes and supporting repository mock behavior' + def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath1','OK') + def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath2','OK') + and: 'the batch store operation will fail' + mockFragmentRepository.saveAll(*_) >> { throw new DataIntegrityViolationException("Exception occurred") } + when: 'trying to store data nodes' + objectUnderTest.storeDataNodes('dataSpaceName', 'anchorName', [dataNode1, dataNode2]) + then: 'the two data nodes are saved individually' + 2 * mockFragmentRepository.save(_); + } + + def 'Store single data node.'() { + given: 'a data node' + def dataNode = new DataNode() + when: 'storing a single data node' + objectUnderTest.storeDataNode('dataspace1', 'anchor1', dataNode) + then: 'the call is redirected to storing a collection of data nodes with just the given data node' + 1 * objectUnderTest.storeDataNodes('dataspace1', 'anchor1', [dataNode]) + } def 'Handling of StaleStateException (caused by concurrent updates) during update data node and descendants.'() { given: 'the fragment repository returns a fragment entity' @@ -66,10 +89,10 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() { given: 'the system contains and can update one datanode' - def dataNode1 = mockDataNodeAndFragmentEntity('/node1', 'OK') + def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('/node1', 'OK') and: 'the system contains two more datanodes that throw an exception while updating' - def dataNode2 = mockDataNodeAndFragmentEntity('/node2', 'EXCEPTION') - def dataNode3 = mockDataNodeAndFragmentEntity('/node3', 'EXCEPTION') + def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('/node2', 'EXCEPTION') + def dataNode3 = createDataNodeAndMockRepositoryMethodSupportingIt('/node3', 'EXCEPTION') and: 'the batch update will therefore also fail' mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } when: 'attempt batch update data nodes' @@ -84,7 +107,6 @@ class CpsDataPersistenceServiceSpec extends Specification { assert thrown.details.contains('/node3') } - def 'Retrieving a data node with a property JSON value of #scenario'() { given: 'the db has a 
fragment with an attribute property JSON value of #scenario' mockFragmentWithJson("{\"some attribute\": ${dataString}}") @@ -119,6 +141,20 @@ class CpsDataPersistenceServiceSpec extends Specification { thrown(DataValidationException) } + def 'Retrieving multiple data nodes.'() { + given: 'db contains an anchor' + def anchorEntity = new AnchorEntity(id:123) + mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity + and: 'fragment repository returns a collection of fragments' + def fragmentEntity1 = new FragmentEntity(xpath: '/xpath1', childFragments: []) + def fragmentEntity2 = new FragmentEntity(xpath: '/xpath2', childFragments: []) + mockFragmentRepository.findByAnchorAndMultipleCpsPaths(123, ['/xpath1', '/xpath2'] as Set<String>) >> [fragmentEntity1, fragmentEntity2] + when: 'getting data nodes for 2 xpaths' + def result = objectUnderTest.getDataNodes('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: '2 data nodes are returned' + assert result.size() == 2 + } + def 'start session'() { when: 'start session' objectUnderTest.startSession() @@ -142,6 +178,25 @@ class CpsDataPersistenceServiceSpec extends Specification { 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) } + def 'update data node leaves: #scenario'(){ + given: 'A node exists for the given xpath' + mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/some/xpath') >> new FragmentEntity(xpath: '/some/xpath', attributes: existingAttributes) + when: 'the node leaves are updated' + objectUnderTest.updateDataLeaves('some-dataspace', 'some-anchor', '/some/xpath', newAttributes as Map<String, Serializable>) + then: 'the fragment entity saved has the original and new attributes' + 1 * mockFragmentRepository.save({fragmentEntity -> { + assert fragmentEntity.getXpath() == '/some/xpath' + assert fragmentEntity.getAttributes() == mergedAttributes + }}) + where: 'the following attributes combinations are used' + scenario | existingAttributes | newAttributes | mergedAttributes + 'add new leaf' | '{"existing":"value"}' | ["new":"value"] | '{"existing":"value","new":"value"}' + 'update existing leaf' | '{"existing":"value"}' | ["existing":"value2"] | '{"existing":"value2"}' + 'update nothing with nothing' | '' | [] | '' + 'update with nothing' | '{"existing":"value"}' | [] | '{"existing":"value"}' + 'update with same value' | '{"existing":"value"}' | ["existing":"value"] | '{"existing":"value"}' + } + def 'update data node and descendants: #scenario'(){ given: 'mocked responses' mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/test/xpath') >> new FragmentEntity(xpath: '/test/xpath', childFragments: []) @@ -174,7 +229,7 @@ class CpsDataPersistenceServiceSpec extends Specification { }}) } - def mockDataNodeAndFragmentEntity(xpath, scenario) { + def createDataNodeAndMockRepositoryMethodSupportingIt(xpath, scenario) { def dataNode = new DataNodeBuilder().withXpath(xpath).build() def fragmentEntity = new FragmentEntity(xpath: xpath, childFragments: []) mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, xpath) >> fragmentEntity @@ -185,11 +240,8 @@ class CpsDataPersistenceServiceSpec extends Specification { } def mockFragmentWithJson(json) { - def anchorName = 'some anchor' - def mockAnchor = Mock(AnchorEntity) - mockAnchor.getId() >> 123 - mockAnchor.getName() >> anchorName - mockAnchorRepository.getByDataspaceAndName(*_) >> mockAnchor + def anchorEntity = new AnchorEntity(id:123) + 
mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity def mockFragmentExtract = Mock(FragmentExtract) mockFragmentExtract.getId() >> 456 mockFragmentExtract.getAttributes() >> json diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy index f9ebc52f18..4c67f7e972 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy @@ -2,6 +2,7 @@ * ============LICENSE_START======================================================= * Copyright (C) 2021-2022 Nordix Foundation * Modifications Copyright (C) 2021-2022 Bell Canada. + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. @@ -28,6 +29,7 @@ import org.onap.cps.spi.exceptions.DataspaceNotFoundException import org.onap.cps.spi.exceptions.SchemaSetNotFoundException import org.onap.cps.spi.model.ModuleDefinition import org.onap.cps.spi.model.ModuleReference +import org.onap.cps.spi.model.SchemaSet import org.onap.cps.spi.repository.AnchorRepository import org.onap.cps.spi.repository.SchemaSetRepository import org.onap.cps.spi.repository.SchemaSetYangResourceRepositoryImpl @@ -209,6 +211,14 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase } @Sql([CLEAR_DATA, SET_DATA]) + def 'Retrieve schema sets for a given dataspace name'() { + when: 'the schema set resources for a given dataspace name is retrieved' + def result = objectUnderTest.getSchemaSetsByDataspaceName(DATASPACE_NAME) + then: 'the correct resources are returned' + result.contains(new SchemaSet(name: 'SCHEMA-SET-001', dataspaceName: 'DATASPACE-001')) + } + + @Sql([CLEAR_DATA, SET_DATA]) def 'Delete schema set'() { when: 'a schema set is deleted with cascade-prohibited option' objectUnderTest.deleteSchemaSet(DATASPACE_NAME, SCHEMA_SET_NAME_NO_ANCHORS) @@ -220,8 +230,9 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase def 'Identifying new module references where #scenario'() { when: 'identifyNewModuleReferences is called' def result = objectUnderTest.identifyNewModuleReferences(moduleReferences) - then: 'the correct module reference collection is returned' - assert result == expectedResult + then: 'the correct module references are returned' + assert result.size() == expectedResult.size() + assert result.containsAll(expectedResult) where: 'the following data is used' scenario | moduleReferences || expectedResult 'new module references exist' | toModuleReference([['some module 1' : 'some revision 1'], ['some module 2' : 'some revision 2']]) || toModuleReference([['some module 1' : 'some revision 1'], ['some module 2' : 'some revision 2']]) @@ -294,7 +305,7 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase def moduleReferences = [].withDefault { [:] } moduleReferenceAsMap.forEach(property -> property.forEach((moduleName, revision) -> { - moduleReferences.add(new ModuleReference('moduleName' : moduleName, 'revision' : revision)) + moduleReferences.add(new ModuleReference(moduleName, revision)) })) return moduleReferences } diff --git 
a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy new file mode 100644 index 0000000000..3bbae2d08c --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy @@ -0,0 +1,74 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.impl + +import org.onap.cps.spi.model.DataNode +import org.onap.cps.spi.model.DataNodeBuilder + +class CpsPersistencePerfSpecBase extends CpsPersistenceSpecBase { + + static final String PERF_TEST_DATA = '/data/perf-test.sql' + static final String PERF_DATASPACE = 'PERF-DATASPACE' + static final String PERF_ANCHOR = 'PERF-ANCHOR' + static final String PERF_TEST_PARENT = '/perf-parent-1' + + static def xpathsToAllGrandChildren = [] + + def createLineage(cpsDataPersistenceService, numberOfChildren, numberOfGrandChildren, createLists) { + xpathsToAllGrandChildren = [] + (1..numberOfChildren).each { + if (createLists) { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-list-${it}[@key='%d']" + def listElements = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + cpsDataPersistenceService.addListElements(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, listElements) + } else { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-child-${it}/perf-test-grand-child-%d" + def grandChildren = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + def child = new DataNodeBuilder() + .withXpath("${PERF_TEST_PARENT}/perf-test-child-${it}") + .withChildDataNodes(grandChildren) + .build() + cpsDataPersistenceService.addChildDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, child) + } + } + } + + def goForthAndMultiply(xpathFormat, numberOfGrandChildren) { + def grandChildren = [] + (1..numberOfGrandChildren).each { + def xpath = String.format(xpathFormat as String, it) + def grandChild = new DataNodeBuilder().withXpath(xpath).build() + xpathsToAllGrandChildren.add(grandChild.xpath) + grandChildren.add(grandChild) + } + return grandChildren + } + + def countDataNodes(dataNodes) { + int nodeCount = 1 + for (DataNode parent : dataNodes) { + for (DataNode child : parent.childDataNodes) { + nodeCount = nodeCount + (countDataNodes(child)) + } + } + return nodeCount + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy new file mode 100644 index 0000000000..5aae285d7b --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy @@ -0,0 
+1,154 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql +import org.springframework.util.StopWatch + +import java.util.concurrent.TimeUnit + +class CpsDataPersistenceServiceDeletePerfTest extends CpsPersistencePerfSpecBase { + + @Autowired + CpsDataPersistenceService objectUnderTest + + static def NUMBER_OF_CHILDREN = 100 + static def NUMBER_OF_GRAND_CHILDREN = 50 + static def NUMBER_OF_LISTS = 100 + static def NUMBER_OF_LIST_ELEMENTS = 50 + static def ALLOWED_SETUP_TIME_MS = TimeUnit.SECONDS.toMillis(10) + + def stopWatch = new StopWatch() + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 children with grandchildren'() { + when: 'child nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + def 'Delete 50 grandchildren (that have no descendants)'() { + when: 'target nodes are deleted' + stopWatch.start() + (1..50).each { + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-child-6/perf-test-grand-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 500 milliseconds' + assert deleteDurationInMillis < 500 + } + + def 'Delete 1 large data node with many descendants'() { + when: 'parent node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 2500 milliseconds' + assert deleteDurationInMillis < 2500 + } + + @Sql([CLEAR_DATA, 
PERF_TEST_DATA]) + def 'Create a node with many list elements (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_LISTS, NUMBER_OF_LIST_ELEMENTS, true) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 whole lists with many elements'() { + when: 'list nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-list-${it}".toString(); + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 4000 milliseconds' + assert deleteDurationInMillis < 4000 + } + + def 'Delete 10 list elements with keys'() { + when: 'list elements are deleted' + stopWatch.start() + (1..10).each { + def key = it.toString() + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-list-6[@key='${key}']" + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete root node with many descendants'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'root node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, '/') + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete data nodes for an anchor'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'data nodes are deleted' + stopWatch.start() + objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy new file mode 100644 index 0000000000..2346239dff --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy @@ -0,0 +1,125 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase +import org.springframework.util.StopWatch +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.spi.repository.AnchorRepository +import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.FragmentRepository +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql + +import java.util.concurrent.TimeUnit + +import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS + +class CpsDataPersistenceServicePerfTest extends CpsPersistencePerfSpecBase { + + @Autowired + CpsDataPersistenceService objectUnderTest + + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + AnchorRepository anchorRepository + + @Autowired + FragmentRepository fragmentRepository + + static def NUMBER_OF_CHILDREN = 200 + static def NUMBER_OF_GRAND_CHILDREN = 50 + static def TOTAL_NUMBER_OF_NODES = 1 + NUMBER_OF_CHILDREN + (NUMBER_OF_CHILDREN * NUMBER_OF_GRAND_CHILDREN) // Parent + Children + Grand-children + + def stopWatch = new StopWatch() + def readStopWatch = new StopWatch() + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under 10 seconds' + assert setupDurationInMillis < 10000 + } + + def 'Get data node with many descendants by xpath #scenario'() { + when: 'get parent is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.getDataNode(PERF_DATASPACE, PERF_ANCHOR, xpath, INCLUDE_ALL_DESCENDANTS) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under 500 milliseconds' + assert readDurationInMillis < 500 + and: 'data node is returned with all the descendants populated' + assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES + where: 'the following xPaths are used' + scenario || xpath + 'parent' || PERF_TEST_PARENT + 'root' || '' + } + + def 'Query parent data node with many descendants by cps-path'() { + when: 'query is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-parent-1' , INCLUDE_ALL_DESCENDANTS) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under 500 milliseconds' + assert readDurationInMillis < 500 + and: 'data node is returned with all the descendants populated' + assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES + } + + def 'Performance of finding multiple 
xpaths'() { + when: 'we query for all grandchildren (except 1 for fun) with the new native method' + xpathsToAllGrandChildren.remove(0) + readStopWatch.start() + def result = objectUnderTest.getDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToAllGrandChildren, INCLUDE_ALL_DESCENDANTS) + readStopWatch.stop() + def readDurationInMillis = readStopWatch.getTotalTimeMillis() + then: 'the returned number of entities equal to the number of children * number of grandchildren' + assert result.size() == xpathsToAllGrandChildren.size() + and: 'it took less then 4000ms' + assert readDurationInMillis < 4000 + } + + def 'Query many descendants by cps-path with #scenario'() { + when: 'query is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-test-grand-child-1', descendantsOption) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under #allowedDuration milliseconds' + assert readDurationInMillis < allowedDuration + and: 'data node is returned with all the descendants populated' + assert result.size() == NUMBER_OF_CHILDREN + where: 'the following options are used' + scenario | descendantsOption || allowedDuration + 'omit descendants ' | OMIT_DESCENDANTS || 150 + 'include descendants (although there are none)' | INCLUDE_ALL_DESCENDANTS || 150 + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy new file mode 100644 index 0000000000..9b722cddae --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy @@ -0,0 +1,103 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.CpsModulePersistenceService +import org.onap.cps.spi.entities.SchemaSetEntity +import org.onap.cps.spi.impl.CpsPersistenceSpecBase +import org.onap.cps.spi.model.ModuleReference +import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.ModuleReferenceRepository +import org.onap.cps.spi.repository.SchemaSetRepository +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql +import org.springframework.util.StopWatch + +import java.util.concurrent.ThreadLocalRandom + +class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase { + + static final String PERF_TEST_DATA = '/data/perf-test.sql' + + def NEW_RESOURCE_CONTENT = 'module stores {\n' + + ' yang-version 1.1;\n' + + ' namespace "org:onap:ccsdk:sample";\n' + + '\n' + + ' prefix book-store;\n' + + '\n' + + ' revision "2020-09-15" {\n' + + ' description\n' + + ' "Sample Model";\n' + + ' }' + + '}' + + @Autowired + CpsModulePersistenceService objectUnderTest + + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + SchemaSetRepository schemaSetRepository + + @Autowired + ModuleReferenceRepository moduleReferenceRepository + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Store new schema set with many modules'() { + when: 'a new schema set with 200 modules is stored' + def newYangResourcesNameToContentMap = [:] + (1..200).each { + def year = 2000 + it + def resourceName = "module${it}".toString() + def moduleName = "stores${it}" + def content = NEW_RESOURCE_CONTENT.replace('2020',String.valueOf(year)).replace('stores',moduleName) + newYangResourcesNameToContentMap.put(resourceName, content) + } + objectUnderTest.storeSchemaSet('PERF-DATASPACE', 'perfSchemaSet', newYangResourcesNameToContentMap) + then: 'the schema set is persisted correctly' + def dataspaceEntity = dataspaceRepository.getByName('PERF-DATASPACE') + SchemaSetEntity result = schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'perfSchemaSet') + result.yangResources.size() == 200 + and: 'identification of new module resources is fast enough (1,000 executions less then 6,000 milliseconds)' + def stopWatch = new StopWatch() + 1000.times() { + def moduleReferencesToCheck = createModuleReferencesWithRandomMatchingExistingModuleReferences() + stopWatch.start() + def newModuleReferences = moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck) + stopWatch.stop() + assert newModuleReferences.size() > 0 && newModuleReferences.size() < 300 + } + assert stopWatch.getTotalTimeMillis() < 6000 + } + + def createModuleReferencesWithRandomMatchingExistingModuleReferences() { + def moduleReferences = [] + (1..250).each { + def randomNumber = ThreadLocalRandom.current().nextInt(1, 300) + def year = 2000 + randomNumber + def moduleName = "stores${randomNumber}" + moduleReferences.add(new ModuleReference(moduleName, "${year}-09-15")) + } + return moduleReferences + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy deleted file mode 100644 index fb6749c3fe..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * 
============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.performance - -import org.apache.commons.lang3.time.StopWatch -import org.onap.cps.spi.CpsDataPersistenceService -import org.onap.cps.spi.impl.CpsPersistenceSpecBase -import org.onap.cps.spi.model.DataNode -import org.onap.cps.spi.model.DataNodeBuilder -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.test.context.jdbc.Sql -import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS - -class CpsToDataNodePerfTest extends CpsPersistenceSpecBase { - - static final String PERF_TEST_DATA = '/data/perf-test.sql' - - @Autowired - CpsDataPersistenceService objectUnderTest - - def PERF_TEST_PARENT = '/perf-parent-1' - - def EXPECTED_NUMBER_OF_NODES = 10051 // 1 Parent + 50 Children + 10000 Grand-children - - @Sql([CLEAR_DATA, PERF_TEST_DATA]) - def 'Get data node by xpath with all descendants with many children'() { - given: 'nodes and grandchildren have been persisted' - def setupStopWatch = new StopWatch() - setupStopWatch.start() - createLineage() - setupStopWatch.stop() - def setupDurationInMillis = setupStopWatch.getTime() - and: 'setup duration is under 8000 milliseconds' - assert setupDurationInMillis < 8000 - when: 'get parent is executed with all descendants' - def readStopWatch = new StopWatch() - readStopWatch.start() - def result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - def readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - when: 'get root is executed with all descendants' - readStopWatch.reset() - readStopWatch.start() - result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', '', INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - when: 'query is executed with all descendants' - readStopWatch.reset() - readStopWatch.start() - result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-parent-1', INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants 
populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - } - - def createLineage() { - def numOfChildren = 50 - def numOfGrandChildren = 200 - (1..numOfChildren).each { - def childName = "perf-test-child-${it}".toString() - def newChild = goForthAndMultiply(PERF_TEST_PARENT, childName, numOfGrandChildren) - objectUnderTest.addChildDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, newChild) - } - } - - def goForthAndMultiply(parentXpath, childName, numOfGrandChildren) { - def children = [] - (1..numOfGrandChildren).each { - def child = new DataNodeBuilder().withXpath("${parentXpath}/${childName}/${it}perf-test-grand-child").build() - children.add(child) - } - return new DataNodeBuilder().withXpath("${parentXpath}/${childName}").withChildDataNodes(children).build() - } - - def countDataNodes(dataNodes) { - int nodeCount = 1 - for (DataNode parent : dataNodes) { - for (DataNode child : parent.childDataNodes) { - nodeCount = nodeCount + (countDataNodes(child)) - } - } - return nodeCount - } -} |
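
For reviewers unfamiliar with the new collection-based signatures exercised by the tests above, the sketch below shows how `storeDataNodes`, `addChildDataNodes` and `getDataNodes` are called from a Spock integration test. This is an illustrative sketch only, modelled on `CpsDataPersistenceServiceIntegrationSpec`; the spec class name and the `/example-parent` xpaths are made up for the example and are not part of this change.

```groovy
/*
 * Illustrative sketch only (not part of this change set): exercises the
 * collection-based CpsDataPersistenceService methods verified by the tests above.
 * The spec name and '/example-parent' xpaths are hypothetical.
 */
package org.onap.cps.spi.impl

import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.model.DataNodeBuilder
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql

import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS

class BatchOperationsUsageSpec extends CpsPersistenceSpecBase {

    static final String SET_DATA = '/data/fragment.sql'

    @Autowired
    CpsDataPersistenceService objectUnderTest

    @Sql([CLEAR_DATA, SET_DATA])
    def 'Store, extend and read a small tree using the batch methods.'() {
        given: 'a parent data node and one child data node'
            def parent = new DataNodeBuilder().withXpath('/example-parent').build()
            def child = new DataNodeBuilder().withXpath('/example-parent/example-child').build()
        when: 'the parent is stored as a (single element) collection'
            objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, [parent])
        and: 'the child is added under it, again as a collection'
            objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, '/example-parent', [child])
        then: 'both nodes can be fetched in one call using multiple xpaths'
            def result = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME1,
                    ['/example-parent', '/example-parent/example-child'], OMIT_DESCENDANTS)
            assert result.size() == 2
    }
}
```

As in the integration tests in this change, duplicate or invalid xpaths passed to `getDataNodes` would simply reduce the number of returned nodes rather than raise an exception, so callers can batch lookups without pre-validating every path.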