31 files changed, 478 insertions, 334 deletions
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java new file mode 100644 index 0000000000..d259d91796 --- /dev/null +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java @@ -0,0 +1,79 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.controller; + +import io.swagger.v3.oas.annotations.Hidden; +import java.util.List; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ncmp.api.datajobs.DataJobService; +import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata; +import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest; +import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest; +import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * Controller responsible for handling data job write operations. + * This class exposes an API endpoint that accepts a write request for a data job and processes it. + */ +@Slf4j +@RestController +@RequestMapping("/do-not-use/dataJobs") +@RequiredArgsConstructor +public class DataJobControllerForTest { + + private final DataJobService dataJobService; + + /** + * Handles POST requests to write a data job. This endpoint is unsupported and intended for testing purposes only. + * This internal endpoint processes a data job write request by extracting necessary metadata and data + * from the request body and delegating the operation to the {@link DataJobService}. + * <p><b>Note:</b> The {@link DataJobRequest} parameter is created and used for testing purposes only. + * In a production environment, data job write operations are not triggered through internal workflows.</p> + * + * @param authorization The optional authorization token sent in the request header. + * @param dataJobId The unique identifier for the data job, extracted from the URL path. + * @param dataJobRequest The request payload containing metadata and data for the data job write operation. 
+ * @return A {@link ResponseEntity} containing a list of {@link SubJobWriteResponse} objects representing the + * status of each sub-job within the data job, or an error response with an appropriate HTTP status code. + */ + @PostMapping("/{dataJobId}/write") + @Hidden + public ResponseEntity<List<SubJobWriteResponse>> writeDataJob(@RequestHeader(value = "Authorization", + required = false) final String authorization, + @PathVariable("dataJobId") final String dataJobId, + @RequestBody final DataJobRequest dataJobRequest) { + log.info("Internal API: writeDataJob invoked for {}", dataJobId); + final DataJobMetadata dataJobMetadata = dataJobRequest.dataJobMetadata(); + final DataJobWriteRequest dataJobWriteRequest = dataJobRequest.dataJobWriteRequest(); + final List<SubJobWriteResponse> subJobWriteResponses = dataJobService.writeDataJob(authorization, dataJobId, + dataJobMetadata, dataJobWriteRequest); + return ResponseEntity.ok(subJobWriteResponses); + } +} + diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy new file mode 100644 index 0000000000..6fc4a699e5 --- /dev/null +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy @@ -0,0 +1,50 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.controller + +import org.onap.cps.ncmp.api.datajobs.DataJobService +import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata +import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest +import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest +import org.onap.cps.ncmp.api.datajobs.models.WriteOperation +import org.springframework.http.HttpStatus +import spock.lang.Specification + +class DataJobControllerForTestSpec extends Specification { + + DataJobService mockDataJobService = Mock() + + def objectUnderTest = new DataJobControllerForTest(mockDataJobService) + + def 'Write Data Job request'() { + given: 'a valid datajob write request' + def dataJobMetadata = new DataJobMetadata('some destination', 'some accept type', 'some content type') + def writeOperations = [ new WriteOperation('/path/to/node', 'create', 'op123', 'value1') ] + def dataJobWriteRequest = new DataJobWriteRequest(writeOperations) + def dataJobRequest = new DataJobRequest(dataJobMetadata, dataJobWriteRequest) + when: 'write data job is called' + def result = objectUnderTest.writeDataJob('my authorization', 'my job', dataJobRequest) + then: 'response is 200 OK' + assert result.statusCode == HttpStatus.OK + and: 'the service method is called once with expected parameters' + 1 * mockDataJobService.writeDataJob('my authorization', 'my job', dataJobMetadata, dataJobWriteRequest) + } +} diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy index aad04a18ae..3a9a0bb09c 100644 --- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021 highstreet technologies GmbH - * Modifications Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -102,6 +102,9 @@ class NetworkCmProxyRestExceptionHandlerSpec extends Specification { @SpringBean NcmpPassthroughResourceRequestHandler StubbedNcmpPassthroughResourceRequestHandler = Stub() + @SpringBean + DataJobControllerForTest stubbedDataJobControllerForTest = Stub() + @Value('${rest.api.ncmp-base-path}') def basePathNcmp diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java new file mode 100644 index 0000000000..fe73a601b9 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java @@ -0,0 +1,24 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.datajobs.models; + +public record DataJobRequest(DataJobMetadata dataJobMetadata, DataJobWriteRequest dataJobWriteRequest) { +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java index 04c3ad2fc6..56352c1c81 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -31,6 +31,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest; import org.onap.cps.ncmp.api.datajobs.models.DmiWriteOperation; import org.onap.cps.ncmp.api.datajobs.models.ProducerKey; import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse; +import org.onap.cps.utils.JsonObjectMapper; import org.springframework.stereotype.Service; @Slf4j @@ -40,6 +41,7 @@ public class DataJobServiceImpl implements DataJobService { private final DmiSubJobRequestHandler dmiSubJobClient; private final WriteRequestExaminer writeRequestExaminer; + private final JsonObjectMapper jsonObjectMapper; @Override public void readDataJob(final String authorization, @@ -54,14 +56,25 @@ public class DataJobServiceImpl implements DataJobService { final String dataJobId, final DataJobMetadata dataJobMetadata, final DataJobWriteRequest dataJobWriteRequest) { - log.info("data job id for write operation is: {}", dataJobId); + + log.info("Data Job ID: {} - Total operations received: {}", dataJobId, dataJobWriteRequest.data().size()); + logJsonRepresentation("Initiating WRITE operation for Data Job ID: " + dataJobId, dataJobWriteRequest); final Map<ProducerKey, List<DmiWriteOperation>> dmiWriteOperationsPerProducerKey = writeRequestExaminer.splitDmiWriteOperationsFromRequest(dataJobId, dataJobWriteRequest); - return dmiSubJobClient.sendRequestsToDmi(authorization, - dataJobId, - dataJobMetadata, - dmiWriteOperationsPerProducerKey); + final List<SubJobWriteResponse> subJobWriteResponses = dmiSubJobClient.sendRequestsToDmi(authorization, + dataJobId, dataJobMetadata, dmiWriteOperationsPerProducerKey); + + log.info("Data Job ID: {} - Received {} sub-job(s) from DMI.", dataJobId, subJobWriteResponses.size()); + logJsonRepresentation("Finalized subJobWriteResponses for Data Job ID: " + dataJobId, subJobWriteResponses); + return subJobWriteResponses; + } + + private void logJsonRepresentation(final String description, final Object object) { + if (log.isDebugEnabled()) { + final String objectAsJsonString = jsonObjectMapper.asJsonString(object); + log.debug("{} (JSON): {}", description, objectAsJsonString); + } } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java index bdf3785a7a..8d1d50ec15 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java @@ -29,10 +29,10 @@ import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY import com.hazelcast.map.IMap; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsQueryService; @@ -54,6 +54,7 @@ import org.springframework.stereotype.Component; @Component public class CmHandleQueryServiceImpl implements CmHandleQueryService { private static final String ANCESTOR_CM_HANDLES = "/ancestor::cm-handles"; + public static final String CM_HANDLE_ID = "id"; private static final String ALTERNATE_ID = "alternate-id"; private static final Integer NO_LIMIT = 0; private final CpsDataService cpsDataService; @@ -147,7 +148,7 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { @Override public 
Collection<String> getAllCmHandleReferences(final boolean outputAlternateId) { - final String attributeName = outputAlternateId ? ALTERNATE_ID : "id"; + final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID; final String cpsPath = String.format("%s/cm-handles/@%s", NCMP_DMI_REGISTRY_PARENT, attributeName); return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); } @@ -177,20 +178,23 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { for (final Map.Entry<String, TrustLevel> mapEntry : trustLevelPerDmiPlugin.entrySet()) { final String dmiPluginIdentifier = mapEntry.getKey(); final TrustLevel dmiTrustLevel = mapEntry.getValue(); - final Collection<String> candidateCmHandleIds = getCmHandleReferencesByDmiPluginIdentifier( - dmiPluginIdentifier, false); - final Set<String> candidateCmHandleIdsSet = new HashSet<>(candidateCmHandleIds); + final Map<String, String> candidateCmHandleReferences = + getCmHandleReferencesMapByDmiPluginIdentifier(dmiPluginIdentifier); final Map<String, TrustLevel> trustLevelPerCmHandleIdInBatch = - trustLevelPerCmHandleId.getAll(candidateCmHandleIdsSet); - trustLevelPerCmHandleIdInBatch.forEach((cmHandleId, trustLevel) -> { - final TrustLevel effectiveTrustLevel = trustLevel.getEffectiveTrustLevel(dmiTrustLevel); - if (targetTrustLevel.equals(effectiveTrustLevel)) { - selectedCmHandleReferences.add(cmHandleId); + trustLevelPerCmHandleId.getAll(candidateCmHandleReferences.keySet()); + for (final Map.Entry<String, String> candidateCmHandleReference : candidateCmHandleReferences.entrySet()) { + final TrustLevel candidateCmHandleTrustLevel = + trustLevelPerCmHandleIdInBatch.get(candidateCmHandleReference.getKey()); + final TrustLevel effectiveTrustlevel = + candidateCmHandleTrustLevel.getEffectiveTrustLevel(dmiTrustLevel); + if (targetTrustLevel.equals(effectiveTrustlevel)) { + if (outputAlternateId) { + selectedCmHandleReferences.add(candidateCmHandleReference.getValue()); + } else { + selectedCmHandleReferences.add(candidateCmHandleReference.getKey()); + } } - }); - } - if (outputAlternateId) { - return getAlternateIdsByCmHandleIds(selectedCmHandleReferences); + } } return selectedCmHandleReferences; } @@ -220,40 +224,47 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { private Set<String> getIdsByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier, final String dmiProperty, final boolean outputAlternateId) { - final String attributeName = outputAlternateId ? ALTERNATE_ID : "id"; + final String attributeName = outputAlternateId ? 
ALTERNATE_ID : CM_HANDLE_ID; final String cpsPath = String.format("%s/cm-handles[@%s='%s']/@%s", NCMP_DMI_REGISTRY_PARENT, dmiProperty, dmiPluginIdentifier, attributeName); return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); } - private Collection<String> getAlternateIdsByCmHandleIds(final Collection<String> cmHandleIds) { - - final String cpsPath = NCMP_DMI_REGISTRY_PARENT + "/cm-handles[" - + createFormattedQueryString(cmHandleIds) + "]/@alternate-id"; + private Collection<DataNode> getDataNodesByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier, + final String dmiProperty) { + return cpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, + NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@" + dmiProperty + "='" + dmiPluginIdentifier + "']", + OMIT_DESCENDANTS); + } - return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); + private Map<String, String> getCmHandleReferencesMapByDmiPluginIdentifier(final String dmiPluginIdentifier) { + final Map<String, String> cmHandleReferencesMap = new HashMap<>(); + for (final ModelledDmiServiceLeaves modelledDmiServiceLeaf : ModelledDmiServiceLeaves.values()) { + final Collection<DataNode> cmHandlesAsDataNodes = getDataNodesByDmiPluginIdentifierAndDmiProperty( + dmiPluginIdentifier, modelledDmiServiceLeaf.getLeafName()); + for (final DataNode cmHandleAsDataNode : cmHandlesAsDataNodes) { + final String cmHandleId = cmHandleAsDataNode.getLeaves().get(CM_HANDLE_ID).toString(); + final String alternateId = cmHandleAsDataNode.getLeaves().get(ALTERNATE_ID).toString(); + cmHandleReferencesMap.put(cmHandleId, alternateId); + } + } + return cmHandleReferencesMap; } private Collection<String> getCmHandleReferencesByProperties(final PropertyType propertyType, final String propertyName, final String propertyValue, final boolean outputAlternateId) { - final String attributeName = outputAlternateId ? ALTERNATE_ID : "id"; + final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID; final String cpsPath = String.format("//%s[@name='%s' and @value='%s']%s/@%s", propertyType.getYangContainerName(), propertyName, propertyValue, ANCESTOR_CM_HANDLES, attributeName); return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); } - private String createFormattedQueryString(final Collection<String> cmHandleIds) { - return cmHandleIds.stream() - .map(cmHandleId -> "@id='" + cmHandleId + "'") - .collect(Collectors.joining(" or ")); - } - private DataNode getCmHandleState(final String cmHandleId) { cpsValidator.validateNameCharacters(cmHandleId); final String xpath = NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@id='" + cmHandleId + "']/state"; return cpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, xpath, OMIT_DESCENDANTS).iterator().next(); } -}
\ No newline at end of file +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java index 6bb1bfc86c..aeeb86592c 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -118,7 +118,7 @@ public interface InventoryPersistence extends NcmpPersistence { * Get data node with the given cm handle id. * * @param cmHandleId cmHandle ID - * @param fetchDescendantsOption fetchDescendantsOption + * @param fetchDescendantsOption fetch descendants option * @return data node */ Collection<DataNode> getCmHandleDataNodeByCmHandleId(String cmHandleId, @@ -144,9 +144,11 @@ public interface InventoryPersistence extends NcmpPersistence { * Get collection of data nodes of given cm handles. * * @param cmHandleIds collection of cmHandle IDs + * @param fetchDescendantsOption fetch descendants option * @return collection of data nodes */ - Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds); + Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds, + FetchDescendantsOption fetchDescendantsOption); /** * get CM handles that has given module names. diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java index 02e711287e..88322903a3 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. 
* ================================================================================ @@ -34,8 +34,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsModuleService; @@ -134,7 +134,7 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv dataValidationException.getMessage()); } }); - return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds)); + return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds, INCLUDE_ALL_DESCENDANTS)); } @Override @@ -201,22 +201,22 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv } @Override - public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds) { + public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds, + final FetchDescendantsOption fetchDescendantsOption) { final Collection<String> xpaths = new ArrayList<>(cmHandleIds.size()); cmHandleIds.forEach(cmHandleId -> xpaths.add(getXPathForCmHandleById(cmHandleId))); - return this.getDataNodes(xpaths); + return this.getDataNodes(xpaths, fetchDescendantsOption); } @Override public Collection<String> getCmHandleReferencesWithGivenModules(final Collection<String> moduleNamesForQuery, final boolean outputAlternateId) { - if (outputAlternateId) { - final Collection<String> cmHandleIds = + final Collection<String> cmHandleIds = cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery); - return getAlternateIdsFromDataNodes(getCmHandleDataNodes(cmHandleIds)); - } else { - return cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery); + if (outputAlternateId) { + return getAlternateIdsForCmHandleIds(cmHandleIds); } + return cmHandleIds; } @Override @@ -241,12 +241,6 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@alternate-id='", "']")); } - private String getCpsPathForCmHandlesByReferences(final Collection<String> cmHandleReferences) { - return cmHandleReferences.stream() - .flatMap(id -> Stream.of("@id='" + id + "'", "@alternate-id='" + id + "'")) - .collect(Collectors.joining(" or ", NCMP_DMI_REGISTRY_PARENT + "/cm-handles[", "]")); - } - private static String createStateJsonData(final String state) { return "{\"state\":" + state + "}"; } @@ -255,8 +249,13 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv return "{\"cm-handles\":" + jsonObjectMapper.asJsonString(yangModelCmHandles) + "}"; } - private Collection<String> getAlternateIdsFromDataNodes(final Collection<DataNode> dataNodes) { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet()); + + private Collection<String> getAlternateIdsForCmHandleIds(final Collection<String> cmHandleIds) { + final Collection<DataNode> dataNodes = getCmHandleDataNodes(cmHandleIds, OMIT_DESCENDANTS); + return dataNodes.stream() + .map(DataNode::getLeaves) + .map(leaves -> (String) leaves.get("alternate-id")) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toSet()); } } diff --git 
a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java index 6076895f0f..bafb06578e 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java @@ -288,14 +288,4 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan } } - private Collection<String> collectCmHandleReferencesFromDataNodes(final Collection<DataNode> dataNodes, - final boolean outputAlternateId) { - if (outputAlternateId) { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet()); - } else { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("id")).collect(Collectors.toSet()); - } - } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy index 4b536b9710..9f0e134466 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,6 +29,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobReadRequest import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest import org.onap.cps.ncmp.api.datajobs.models.ReadOperation import org.onap.cps.ncmp.api.datajobs.models.WriteOperation +import org.onap.cps.utils.JsonObjectMapper import org.slf4j.LoggerFactory import spock.lang.Specification @@ -36,8 +37,9 @@ class DataJobServiceImplSpec extends Specification { def mockWriteRequestExaminer = Mock(WriteRequestExaminer) def mockDmiSubJobRequestHandler = Mock(DmiSubJobRequestHandler) + def mockJsonObjectMapper = Mock(JsonObjectMapper) - def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer) + def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer, mockJsonObjectMapper) def myDataJobMetadata = new DataJobMetadata('', '', '') def authorization = 'my authorization header' @@ -45,7 +47,7 @@ class DataJobServiceImplSpec extends Specification { def logger = Spy(ListAppender<ILoggingEvent>) def setup() { - setupLogger() + setupLogger(Level.DEBUG) } def cleanup() { @@ -62,22 +64,32 @@ class DataJobServiceImplSpec extends Specification { assert loggingEvent.formattedMessage.contains('data job id for read operation is: my-job-id') } - def 'Write data-job request.'() { + def 'Write data-job request and verify logging when info enabled.'() { given: 'data job metadata and write request' def dataJobWriteRequest = new DataJobWriteRequest([new WriteOperation('', '', '', null)]) - and: 'a map of producer key and dmi 3gpp write operation' + and: 'a map of producer key and DMI 3GPP write operations' def dmiWriteOperationsPerProducerKey = [:] - when: 'write data job request is processed' + and: 'mocking the splitDmiWriteOperationsFromRequest method to return the expected data' + mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest(_, _) >> dmiWriteOperationsPerProducerKey + and: 'mocking the sendRequestsToDmi method to simulate empty sub-job responses from the DMI request handler' + mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey) >> [] + when: 'the write data job request is processed' objectUnderTest.writeDataJob(authorization, 'my-job-id', myDataJobMetadata, dataJobWriteRequest) then: 'the examiner service is called and a map is returned' 1 * mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest('my-job-id', dataJobWriteRequest) >> dmiWriteOperationsPerProducerKey - and: 'the dmi request handler is called with the result from the examiner' - 1 * mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey) + and: 'write operation details are logged at debug level' + with(logger.list.find { it.level == Level.DEBUG }) { + assert it.formattedMessage.contains("Initiating WRITE operation for Data Job ID: my-job-id") + } + and: 'number of operations are logged at info level' + with(logger.list.find { it.level == Level.INFO }) { + assert it.formattedMessage.contains("Data Job ID: my-job-id - Total operations received: 1") + } } - def setupLogger() { + def setupLogger(Level level) { def setupLogger = ((Logger) LoggerFactory.getLogger(DataJobServiceImpl.class)) - setupLogger.setLevel(Level.DEBUG) + setupLogger.setLevel(level) setupLogger.addAppender(logger) logger.start() } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy index 2b0997b523..bc21360c47 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. * ================================================================================ @@ -336,15 +336,15 @@ class InventoryPersistenceImplSpec extends Specification { } def 'Get Alternate Ids for CM Handles that has given module names'() { - given: 'A Collection of data nodes' - def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])] - when: 'the methods to get dataNodes is called and returns correct values' + given: 'cps anchor service returns a CM-handle ID for the given module name' mockCpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, ['sample-module-name']) >> ['ch-1'] - mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], INCLUDE_ALL_DESCENDANTS) >> dataNodes - and: 'the method returns a result' + and: 'cps data service returns some data nodes for the given CM-handle ID' + def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])] + mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], OMIT_DESCENDANTS) >> dataNodes + when: 'the method to get cm-handle references by modules is called (outputting alternate IDs)' def result = objectUnderTest.getCmHandleReferencesWithGivenModules(['sample-module-name'], true) then: 'the result contains the correct alternate Id' - assert result == ['alt-1'] as HashSet + assert result == ['alt-1'] as Set } def 'Replace list content'() { diff --git a/csit/plans/cps/pnfsim/docker-compose.yml b/csit/plans/cps/pnfsim/docker-compose.yml deleted file mode 100644 index 869df22789..0000000000 --- a/csit/plans/cps/pnfsim/docker-compose.yml +++ /dev/null @@ -1,27 +0,0 @@ -# ============LICENSE_START======================================================= -# Modifications Copyright (C) 2022-2024 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============LICENSE_END========================================================= - -services: - netconf-pnp-simulator: - image: blueonap/netconf-pnp-simulator:v2.8.6 - container_name: netconf-simulator - restart: always - ports: - - "831:830" - - "6512:6513" - volumes: - - ./netconf-config:/config/modules/stores - - ./tls:/config/tls diff --git a/csit/plans/cps/sdnc/check_sdnc_mount_node.sh b/csit/plans/cps/sdnc/check_sdnc_mount_node.sh deleted file mode 100644 index e92cec717f..0000000000 --- a/csit/plans/cps/sdnc/check_sdnc_mount_node.sh +++ /dev/null @@ -1,82 +0,0 @@ -# ============LICENSE_START======================================================= -# Copyright (C) 2023-2024 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= - -# WAIT 10 minutes maximum and test every 30 seconds if SDNC is up using HealthCheck API -TIME_OUT=600 -INTERVAL=30 -TIME=0 -while [ "$TIME" -lt "$TIME_OUT" ]; do - response=$(curl --write-out '%{http_code}' --silent --output /dev/null -H "Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==" -X POST -H "X-FromAppId: csit-sdnc" -H "X-TransactionId: csit-sdnc" -H "Accept: application/json" -H "Content-Type: application/json" http://$SDNC_HOST:$SDNC_PORT/restconf/operations/SLI-API:healthcheck ); - echo $response - - if [ "$response" == "200" ]; then - echo SDNC started in $TIME seconds - break; - fi - - echo Sleep: $INTERVAL seconds before testing if SDNC is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds - sleep $INTERVAL - TIME=$(($TIME+$INTERVAL)) -done - -if [ "$TIME" -ge "$TIME_OUT" ]; then - echo TIME OUT: karaf session not started in $TIME_OUT seconds... Could cause problems for testing activities... 
-fi - -###################### mount pnf-sim as PNFDemo ########################## -SDNC_TIME_OUT=250 -SDNC_INTERVAL=10 -SDNC_TIME=0 - -while [ "$SDNC_TIME" -le "$SDNC_TIME_OUT" ]; do - - # Mount netconf node - curl --location --request PUT 'http://'$SDNC_HOST:$SDNC_PORT'/restconf/config/network-topology:network-topology/topology/topology-netconf/node/ietfYang-PNFDemo' \ - --header 'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==' \ - --header 'Content-Type: application/json' \ - --data-raw '{ - "node": [ - { - "node-id": "ietfYang-PNFDemo", - "netconf-node-topology:protocol": { - "name": "TLS" - }, - "netconf-node-topology:host": "'$LOCAL_IP'", - "netconf-node-topology:key-based": { - "username": "netconf", - "key-id": "ODL_private_key_0" - }, - "netconf-node-topology:port": 6512, - "netconf-node-topology:tcp-only": false, - "netconf-node-topology:max-connection-attempts": 5 - } - ] - }' - - # Verify node has been mounted - - RESPONSE=$( curl --location --request GET 'http://'$SDNC_HOST:$SDNC_PORT'/restconf/config/network-topology:network-topology/topology/topology-netconf' --header 'Authorization: basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==') - - if [[ "$RESPONSE" == *"ietfYang-PNFDemo"* ]]; then - echo "Node mounted in $SDNC_TIME" - sleep 10 - break; - fi - - sleep $SDNC_INTERVAL - SDNC_TIME=$((SDNC_TIME + SDNC_INTERVAL)) - -done
\ No newline at end of file diff --git a/csit/plans/cps/sdnc/docker-compose.yml b/csit/plans/cps/sdnc/docker-compose.yml deleted file mode 100644 index 29e8293fde..0000000000 --- a/csit/plans/cps/sdnc/docker-compose.yml +++ /dev/null @@ -1,71 +0,0 @@ -# ============LICENSE_START======================================================= -# Modifications Copyright (C) 2022 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= - -services: - mariadb: - image: mariadb:10.5 - ports: - - "3306:3306" - environment: - - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} - - MYSQL_ROOT_HOST=% - - MYSQL_USER=${MYSQL_USER:-sdnc} - - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} - - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5" - - sdnc: - image: onap/sdnc-image:${VERSION:-2.2.3} - container_name: sdnc - depends_on : - - mariadb - entrypoint: ["/opt/onap/sdnc/bin/startODL.sh"] - ports: - - "8282:8181" - hostname: - sdnc - links: - - mariadb:dbhost - - mariadb:sdnctldb01 - - mariadb:sdnctldb02 - environment: - - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} - - MYSQL_USER=${MYSQL_USER:-sdnc} - - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} - - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} - - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties - - SDNC_BIN=/opt/onap/sdnc/bin - - ODL_CERT_DIR=/opt/opendaylight/certs - - ODL_ADMIN_USERNAME=${ODL_USER:-admin} - - ODL_ADMIN_PASSWORD=${ODL_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} - - SDNC_DB_INIT=true - - SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} - - volumes: - - ./certs/certs.properties:/opt/opendaylight/certs/certs.properties - - ./certs/keys0.zip:/opt/opendaylight/certs/keys0.zip - - dns: - - ${DNS_IP_ADDR-10.0.100.1} - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5"
\ No newline at end of file diff --git a/csit/plans/cps/sdnc/sdnc_setup.sh b/csit/plans/cps/sdnc/sdnc_setup.sh deleted file mode 100644 index 61c61fc289..0000000000 --- a/csit/plans/cps/sdnc/sdnc_setup.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# ============LICENSE_START======================================================= -# Copyright (C) 2021-2022 Nordix Foundation. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END========================================================= - -# @author Rahul Tyagi (rahul.tyagi@est.tech) -# setup sdnc - -export SDNC_CERT_PATH=$WORKSPACE/plans/cps/sdnc/certs - -#start SDNC containers with docker compose and configuration from docker-compose.yml -docker-compose -f $WORKSPACE/plans/cps/sdnc/docker-compose.yml up -d
\ No newline at end of file diff --git a/csit/plans/cps/setup.sh b/csit/plans/cps/setup.sh index 332be8c461..f22173a9fd 100755 --- a/csit/plans/cps/setup.sh +++ b/csit/plans/cps/setup.sh @@ -5,7 +5,7 @@ # Modifications copyright (c) 2020-2021 Samsung Electronics Co., Ltd. # Modifications Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021 Bell Canada. -# Modifications Copyright (C) 2021-2025 Nordix Foundation. +# Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -24,7 +24,7 @@ ###################### setup env ############################ # Set env variables for docker compose -export LOCAL_IP=$((ip -4 addr show docker0 | grep -Po 'inet \K[\d.]+') || hostname -I | awk '{print $1}') +export LOCAL_IP=localhost source $WORKSPACE/plans/cps/test.properties export $(cut -d= -f1 $WORKSPACE/plans/cps/test.properties) @@ -33,14 +33,17 @@ export $(cut -d= -f1 $WORKSPACE/plans/cps/test.properties) cd $CPS_HOME/docker-compose # start CPS/NCMP, DMI Plugin, and PostgreSQL containers with docker compose, waiting for all containers to be healthy -docker-compose --profile dmi-service up -d --quiet-pull --wait || exit 1 - -###################### setup sdnc ####################################### -source $WORKSPACE/plans/cps/sdnc/sdnc_setup.sh - -###################### setup pnfsim ##################################### -docker-compose -f $WORKSPACE/plans/cps/pnfsim/docker-compose.yml up -d +docker-compose --profile dmi-service --profile dmi-stub up -d --quiet-pull --wait || exit 1 ###################### ROBOT Configurations ########################## # Pass variables required for Robot test suites in ROBOT_VARIABLES -ROBOT_VARIABLES="-v CPS_CORE_HOST:$CPS_CORE_HOST -v CPS_CORE_PORT:$CPS_CORE_PORT -v DMI_HOST:$LOCAL_IP -v DMI_PORT:$DMI_PORT -v DMI_VERSION:$DMI_VERSION -v DMI_CSIT_STUB_HOST:$LOCAL_IP -v DMI_CSIT_STUB_PORT:$DMI_DEMO_STUB_PORT -v DMI_AUTH_ENABLED:$DMI_AUTH_ENABLED -v DATADIR_CPS_CORE:$WORKSPACE/data/cps-core -v DATADIR_NCMP:$WORKSPACE/data/ncmp -v DATADIR_SUBS_NOTIFICATION:$WORKSPACE/data/subscription-notification --exitonfailure" +ROBOT_VARIABLES="\ +-v CPS_CORE_HOST:$CPS_CORE_HOST \ +-v CPS_CORE_PORT:$CPS_CORE_PORT \ +-v DMI_HOST:$DMI_HOST \ +-v DMI_PORT:$DMI_PORT \ +-v DMI_CSIT_STUB_HOST:$DMI_DEMO_STUB_HOST \ +-v DMI_CSIT_STUB_PORT:$DMI_DEMO_STUB_PORT \ +-v DATADIR_CPS_CORE:$WORKSPACE/data/cps-core \ +-v DATADIR_NCMP:$WORKSPACE/data/ncmp \ +-v DATADIR_SUBS_NOTIFICATION:$WORKSPACE/data/subscription-notification" diff --git a/csit/plans/cps/test.properties b/csit/plans/cps/test.properties index 52e82bdb85..ce218320af 100644 --- a/csit/plans/cps/test.properties +++ b/csit/plans/cps/test.properties @@ -1,9 +1,9 @@ -DB_HOST=$LOCAL_IP +DB_HOST=dbpostgresql DB_USERNAME=cps DB_PASSWORD=cps -SDNC_HOST=$LOCAL_IP -SDNC_PORT=8282 +SDNC_HOST=sdnc +SDNC_PORT=8181 SDNC_USERNAME=admin SDNC_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U @@ -12,23 +12,24 @@ CPS_CORE_PORT=8883 CPS_CORE_USERNAME=cpsuser CPS_CORE_PASSWORD=cpsr0cks! -DMI_HOST=$LOCAL_IP -DMI_PORT=8783 +DMI_HOST=ncmp-dmi-plugin +DMI_PORT=8080 DMI_USERNAME=cpsuser DMI_PASSWORD=cpsr0cks! 
-DMI_SERVICE_URL=http://$LOCAL_IP:$DMI_PORT +DMI_SERVICE_URL=http://$DMI_HOST:$DMI_PORT DOCKER_REPO=nexus3.onap.org:10003 CPS_VERSION=latest DMI_VERSION=latest +DMI_DEMO_STUB_VERSION=latest ADVISED_MODULES_SYNC_SLEEP_TIME_MS=2000 CMHANDLE_DATA_SYNC_SLEEP_TIME_MS=2000 CPS_HOME=$CPS_HOME -DMI_DEMO_STUB_PORT=8784 -DMI_DEMO_STUB_VERSION=latest +DMI_DEMO_STUB_HOST=ncmp-dmi-plugin-demo-and-csit-stub +DMI_DEMO_STUB_PORT=8092 DMI_AUTH_ENABLED=true diff --git a/csit/plans/cps/pnfsim/netconf-config/LICENSE b/docker-compose/config/pnfsim/netconf-config/LICENSE index 3bc5b026c6..3bc5b026c6 100755 --- a/csit/plans/cps/pnfsim/netconf-config/LICENSE +++ b/docker-compose/config/pnfsim/netconf-config/LICENSE diff --git a/csit/plans/cps/pnfsim/netconf-config/stores.yang b/docker-compose/config/pnfsim/netconf-config/stores.yang index 56ad95c8d5..56ad95c8d5 100644 --- a/csit/plans/cps/pnfsim/netconf-config/stores.yang +++ b/docker-compose/config/pnfsim/netconf-config/stores.yang diff --git a/csit/plans/cps/pnfsim/netconf-config/subscriber.py b/docker-compose/config/pnfsim/netconf-config/subscriber.py index 5147c93458..5147c93458 100755 --- a/csit/plans/cps/pnfsim/netconf-config/subscriber.py +++ b/docker-compose/config/pnfsim/netconf-config/subscriber.py diff --git a/csit/plans/cps/pnfsim/tls/ca.pem b/docker-compose/config/pnfsim/tls/ca.pem index 4c4473815c..4c4473815c 100644 --- a/csit/plans/cps/pnfsim/tls/ca.pem +++ b/docker-compose/config/pnfsim/tls/ca.pem diff --git a/csit/plans/cps/pnfsim/tls/server_cert.pem b/docker-compose/config/pnfsim/tls/server_cert.pem index a022dc56ca..a022dc56ca 100644 --- a/csit/plans/cps/pnfsim/tls/server_cert.pem +++ b/docker-compose/config/pnfsim/tls/server_cert.pem diff --git a/csit/plans/cps/pnfsim/tls/server_key.pem b/docker-compose/config/pnfsim/tls/server_key.pem index 02fd68846d..02fd68846d 100644 --- a/csit/plans/cps/pnfsim/tls/server_key.pem +++ b/docker-compose/config/pnfsim/tls/server_key.pem diff --git a/csit/plans/cps/sdnc/certs/certs.properties b/docker-compose/config/sdnc/certs/certs.properties index f8f3fa72b6..f8f3fa72b6 100644 --- a/csit/plans/cps/sdnc/certs/certs.properties +++ b/docker-compose/config/sdnc/certs/certs.properties diff --git a/csit/plans/cps/sdnc/certs/keys0.zip b/docker-compose/config/sdnc/certs/keys0.zip Binary files differindex b2dec5c7b2..b2dec5c7b2 100644 --- a/csit/plans/cps/sdnc/certs/keys0.zip +++ b/docker-compose/config/sdnc/certs/keys0.zip diff --git a/docker-compose/config/sdnc/check_sdnc_mount_node.sh b/docker-compose/config/sdnc/check_sdnc_mount_node.sh new file mode 100644 index 0000000000..8fa4bee8cf --- /dev/null +++ b/docker-compose/config/sdnc/check_sdnc_mount_node.sh @@ -0,0 +1,61 @@ +#!/bin/sh +# ============LICENSE_START======================================================= +# Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END========================================================= + +set -x # Enable command echoing +apk --no-cache add curl + +SDNC_HOST=${SDNC_HOST:-'sdnc'} +SDNC_PORT=${SDNC_PORT:-8181} +SDNC_AUTH_HEADER=${SDNC_AUTH_HEADER:-'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=='} +PNF_SIM_HOST=${PNF_SIM_HOST:-'pnf-simulator'} +PNF_SIM_PORT=${PNF_SIM_PORT:-6513} +NODE_ID=${NODE_ID:-'ietfYang-PNFDemo'} + +echo "Attempting to mount node with id '$NODE_ID' to SDNC using RestConf" +curl --request PUT "http://$SDNC_HOST:$SDNC_PORT/restconf/config/network-topology:network-topology/topology/topology-netconf/node/$NODE_ID" \ +--silent --location \ +--header "$SDNC_AUTH_HEADER" \ +--header 'Content-Type: application/json' \ +--data-raw '{ + "node": [ + { + "node-id": "'$NODE_ID'", + "netconf-node-topology:protocol": { + "name": "TLS" + }, + "netconf-node-topology:host": "'$PNF_SIM_HOST'", + "netconf-node-topology:key-based": { + "username": "netconf", + "key-id": "ODL_private_key_0" + }, + "netconf-node-topology:port": '$PNF_SIM_PORT', + "netconf-node-topology:tcp-only": false, + "netconf-node-topology:max-connection-attempts": 5 + } + ] +}' + +# Verify node has been mounted +RESPONSE=$(curl --silent --location --request GET "http://$SDNC_HOST:$SDNC_PORT/restconf/config/network-topology:network-topology/topology/topology-netconf" --header "$SDNC_AUTH_HEADER") + +if echo "$RESPONSE" | grep -q "$NODE_ID"; then + echo "Node mounted successfully" + exit 0 +else + echo "Could not mount node to SNDC" + exit 1 +fi diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index 49646731e2..568fab4810 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -1,7 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (c) 2020 Pantheon.tech. # Modifications Copyright (C) 2021 Bell Canada. -# Modifications Copyright (C) 2022-2025 Nordix Foundation. +# Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -154,11 +154,14 @@ services: image: ${DOCKER_REPO:-nexus3.onap.org:10003}/onap/ncmp-dmi-plugin:${DMI_VERSION:-1.7.0-SNAPSHOT-latest} ports: - ${DMI_PORT:-8783}:8080 + depends_on: + - sdnc + - pnf-simulator environment: CPS_USERNAME: ${CPS_CORE_USERNAME:-cpsuser} CPS_PASSWORD: ${CPS_CORE_PASSWORD:-cpsr0cks!} - CPS_CORE_HOST: ${CPS_CORE_HOST:-cps-and-ncmp} - CPS_CORE_PORT: ${CPS_CORE_PORT:-8080} + CPS_CORE_HOST: ${CPS_CORE_HOST:-nginx} + CPS_CORE_PORT: ${CPS_CORE_PORT:-80} CPS_CORE_USERNAME: ${CPS_CORE_USERNAME:-cpsuser} CPS_CORE_PASSWORD: ${CPS_CORE_PASSWORD:-cpsr0cks!} SDNC_HOST: ${SDNC_HOST:-sdnc} @@ -197,7 +200,6 @@ services: restart: unless-stopped profiles: - dmi-stub - - dmi-service healthcheck: test: wget -q -O - http://localhost:8092/actuator/health/readiness | grep -q '{"status":"UP"}' || exit 1 interval: 10s @@ -205,6 +207,98 @@ services: retries: 3 start_period: 30s + sdnc: + container_name: sdnc + image: onap/sdnc-image:${SDNC_VERSION:-2.2.3} + entrypoint: /opt/onap/sdnc/bin/startODL.sh + ports: + - 8181:8181 + depends_on: + sdnc-db: + condition: service_healthy + hostname: sdnc + links: + - sdnc-db:dbhost + - sdnc-db:sdnctldb01 + - sdnc-db:sdnctldb02 + environment: + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} + - MYSQL_USER=${MYSQL_USER:-sdnc} + - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} + - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} + - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties + - SDNC_BIN=/opt/onap/sdnc/bin + - ODL_CERT_DIR=/opt/opendaylight/certs + - ODL_ADMIN_USERNAME=${SDNC_USERNAME:-admin} + - ODL_ADMIN_PASSWORD=${SDNC_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} + - SDNC_DB_INIT=true + - SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} + volumes: + - ./config/sdnc/certs/certs.properties:/opt/opendaylight/certs/certs.properties + - ./config/sdnc/certs/keys0.zip:/opt/opendaylight/certs/keys0.zip + profiles: + - dmi-service + healthcheck: + test: "wget -q -O - --header 'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==' http://localhost:8181/restconf/operational/network-topology:network-topology || exit 1" + interval: 10s + timeout: 10s + retries: 6 + start_period: 60s + + sdnc-sidecar: # This container runs a script to mount the PNFDemo node to SDNC, needed for CSITs. + container_name: sdnc-sidecar + image: alpine:latest + volumes: + - ./config/sdnc/check_sdnc_mount_node.sh:/root/check_sdnc_mount_node.sh + command: sh /root/check_sdnc_mount_node.sh + depends_on: + sdnc: + condition: service_healthy + pnf-simulator: + condition: service_healthy + profiles: + - dmi-service + # Note: This container does not need a health-check as it immediately exits with status 0 or 1. 
+ + sdnc-db: + container_name: sdnc-db + image: mariadb:10.5 + ports: + - 3306:3306 + environment: + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} + - MYSQL_ROOT_HOST=% + - MYSQL_USER=${MYSQL_USER:-sdnc} + - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} + - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} + profiles: + - dmi-service + healthcheck: + test: healthcheck.sh --connect --innodb_initialized || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s + + pnf-simulator: + container_name: pnf-simulator + image: blueonap/netconf-pnp-simulator:v2.8.6 + restart: always + ports: + - 830:830 + - 6513:6513 + volumes: + - ./config/pnfsim/netconf-config:/config/modules/stores + - ./config/pnfsim/tls:/config/tls + profiles: + - dmi-service + healthcheck: + test: nc -z 127.0.0.1 6513 || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s + policy-executor-stub: container_name: ${POLICY_EXECUTOR_STUB_CONTAINER_NAME:-policy-executor-stub} image: ${DOCKER_REPO:-nexus3.onap.org:10003}/onap/policy-executor-stub:latest diff --git a/docs/release-notes.rst b/docs/release-notes.rst index 376009d06a..29b8844e64 100644 --- a/docs/release-notes.rst +++ b/docs/release-notes.rst @@ -38,6 +38,7 @@ Release Data Features -------- - `CPS-2416 <https://lf-onap.atlassian.net/browse/CPS-2416>`_ Implement XPath Attribute Axis in CPS + - `CPS-2712 <https://lf-onap.atlassian.net/browse/CPS-2712>`_ Use Flux streaming/buffering for CM-handle searches Version: 3.6.1 ============== diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js index eed1ab5190..c6732571ba 100644 --- a/k6-tests/ncmp/common/passthrough-crud.js +++ b/k6-tests/ncmp/common/passthrough-crud.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,13 +18,12 @@ * ============LICENSE_END========================================================= */ -import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js'; import { performPostRequest, performGetRequest, NCMP_BASE_URL, LEGACY_BATCH_TOPIC_NAME, - TOTAL_CM_HANDLES, + getRandomCmHandleReference, } from './utils.js'; export function passthroughRead(useAlternateId) { @@ -66,11 +65,6 @@ export function legacyBatchRead(cmHandleIds) { return performPostRequest(url, payload, 'batchRead'); } -function getRandomCmHandleReference(useAlternateId) { - const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-'; - return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`; -} - function generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants) { const descendantsParam = includeDescendants ? 
`&include-descendants=${includeDescendants}` : ''; return `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}${descendantsParam}`; diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js index 36ce6b48f3..ea77aae176 100644 --- a/k6-tests/ncmp/common/utils.js +++ b/k6-tests/ncmp/common/utils.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024-2025 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ * ============LICENSE_END========================================================= */ +import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js'; import http from 'k6/http'; export const testConfig = JSON.parse(open(`../config/${__ENV.TEST_PROFILE}.json`)); @@ -48,16 +49,29 @@ export function makeBatchOfCmHandleIds(batchSize, batchNumber) { } /** - * Generates an unordered batch of CM-handle IDs based on batch size. - * @returns {string[]} Array of CM-handle IDs, for example ['ch-8', 'ch-2' ... 'ch-32432'] + * Generates an unordered batch of Alternate IDs. + * The batch size is determined by `LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE`, + * and the IDs are generated within the range of `TOTAL_CM_HANDLES`. + * + * @returns {string[]} Array of Alternate IDs, for example, + * ['Region=NorthAmerica,Segment=8', 'Region=NorthAmerica,Segment=2' ... 'Region=NorthAmerica,Segment=32432'] */ -export function makeRandomBatchOfCmHandleIds() { - const cmHandleIds = new Set(); - while (cmHandleIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) { - const randomNum = Math.floor(Math.random() * TOTAL_CM_HANDLES) + 1; - cmHandleIds.add('ch-' + randomNum); +export function makeRandomBatchOfAlternateIds() { + const alternateIds = new Set(); + while (alternateIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) { + alternateIds.add(getRandomCmHandleReference(true)); } - return Array.from(cmHandleIds) + return Array.from(alternateIds) +} + +/** + * Generates a random CM Handle reference based on the provided flag. + * @param useAlternateId + * @returns {string} CM Handle reference representing a CM handle ID or an alternate ID. + */ +export function getRandomCmHandleReference(useAlternateId) { + const prefix = useAlternateId ? 
'Region=NorthAmerica,Segment=' : 'ch-'; + return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`; } /** diff --git a/k6-tests/ncmp/ncmp-test-runner.js b/k6-tests/ncmp/ncmp-test-runner.js index 1104b14e4a..1c53139991 100644 --- a/k6-tests/ncmp/ncmp-test-runner.js +++ b/k6-tests/ncmp/ncmp-test-runner.js @@ -23,7 +23,7 @@ import { Trend } from 'k6/metrics'; import { Reader } from 'k6/x/kafka'; import { TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS, - makeCustomSummaryReport, makeBatchOfCmHandleIds, makeRandomBatchOfCmHandleIds, + makeCustomSummaryReport, makeBatchOfCmHandleIds, makeRandomBatchOfAlternateIds, LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE, REGISTRATION_BATCH_SIZE, LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS, KAFKA_BOOTSTRAP_SERVERS, LEGACY_BATCH_TOPIC_NAME, CONTAINER_UP_TIME_IN_SECONDS, testConfig } from './common/utils.js'; @@ -183,7 +183,7 @@ export function cmHandleSearchCpsPathScenario() { export function cmHandleIdSearchTrustLevelScenario() { const response = executeCmHandleIdSearch('trust-level'); if (check(response, { 'CM handle ID trust level search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID trust level search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID trust level search returned the correct number of cm handle references': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchTrustLevelDurationTrend.add(response.timings.duration); } } @@ -197,8 +197,8 @@ export function cmHandleSearchTrustLevelScenario() { } export function legacyBatchProduceScenario() { - const nextBatchOfCmHandleIds = makeRandomBatchOfCmHandleIds(); - const response = legacyBatchRead(nextBatchOfCmHandleIds); + const nextBatchOfAlternateIds = makeRandomBatchOfAlternateIds(); + const response = legacyBatchRead(nextBatchOfAlternateIds); check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 }); } |
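For local verification, the hidden test-only write endpoint introduced above could be exercised with a plain HTTP call along the lines of the sketch below. The URL path, the optional Authorization header, and the overall dataJobMetadata/dataJobWriteRequest shape come from the change itself; the host, port, token value, and the individual JSON field names are illustrative assumptions and may not match the actual record components.

# Sketch only: host/port, token, and the leaf field names are assumptions, not part of the change.
curl -X POST "http://localhost:8883/do-not-use/dataJobs/my-data-job-id/write" \
  --header "Authorization: Bearer some-token" \
  --header "Content-Type: application/json" \
  --data-raw '{
    "dataJobMetadata": {
      "destination": "some destination",
      "dataAcceptType": "application/json",
      "dataContentType": "application/json"
    },
    "dataJobWriteRequest": {
      "data": [
        { "path": "/path/to/node", "op": "create", "operationId": "op123", "value": "value1" }
      ]
    }
  }'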