3 files changed, 231 insertions, 178 deletions
diff --git a/adapters/mso-vnf-adapter/src/main/java/org/openecomp/mso/adapters/vdu/mapper/VfModuleCustomizationToVduMapper.java b/adapters/mso-vnf-adapter/src/main/java/org/openecomp/mso/adapters/vdu/mapper/VfModuleCustomizationToVduMapper.java
index 22d988f4e4..b04f3c30db 100644
--- a/adapters/mso-vnf-adapter/src/main/java/org/openecomp/mso/adapters/vdu/mapper/VfModuleCustomizationToVduMapper.java
+++ b/adapters/mso-vnf-adapter/src/main/java/org/openecomp/mso/adapters/vdu/mapper/VfModuleCustomizationToVduMapper.java
@@ -1,174 +1,174 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP - SO
- * ================================================================================
- * Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.mso.adapters.vdu.mapper;
-
-import java.util.List;
-import java.util.Map;
-
-import org.openecomp.mso.adapters.vdu.VduArtifact;
-import org.openecomp.mso.adapters.vdu.VduArtifact.ArtifactType;
-import org.openecomp.mso.adapters.vdu.VduModelInfo;
-import org.openecomp.mso.db.catalog.CatalogDatabase;
-import org.openecomp.mso.db.catalog.beans.HeatEnvironment;
-import org.openecomp.mso.db.catalog.beans.HeatFiles;
-import org.openecomp.mso.db.catalog.beans.HeatTemplate;
-import org.openecomp.mso.db.catalog.beans.VfModuleCustomization;
-import org.openecomp.mso.logger.MsoLogger;
-import org.springframework.stereotype.Component;
-
-@Component
-public class VfModuleCustomizationToVduMapper {
-
- private static MsoLogger LOGGER = MsoLogger.getMsoLogger(MsoLogger.Catalog.RA);
-
- public VduModelInfo mapVfModuleCustomizationToVdu(VfModuleCustomization vfModuleCustom) throws Exception {
- CatalogDatabase db = CatalogDatabase.getInstance();
- VduModelInfo vduModel = new VduModelInfo();
- vduModel.setModelCustomizationUUID(vfModuleCustom.getModelCustomizationUuid());
- try {
- // Map the cloud templates, attached files, and environment file
- mapCloudTemplates(
- db.getHeatTemplateByArtifactUuid(vfModuleCustom.getVfModule().getHeatTemplateArtifactUUId()),
- vduModel);
- mapCloudFiles(vfModuleCustom, vduModel);
- mapEnvironment(db.getHeatEnvironmentByArtifactUuid(vfModuleCustom.getHeatEnvironmentArtifactUuid()),
- vduModel);
- } catch (Exception e) {
- LOGGER.debug("unhandled exception in mapVfModuleCustomizationToVdu", e);
- throw new Exception("Exception during mapVfModuleCustomizationToVdu " + e.getMessage());
- } finally {
- // Make sure DB session is closed
- db.close();
- }
-
- return vduModel;
- }
-
- public VduModelInfo mapVfModuleCustVolumeToVdu(VfModuleCustomization vfModuleCustom) throws Exception {
- CatalogDatabase db = CatalogDatabase.getInstance();
- VduModelInfo vduModel = new VduModelInfo();
- vduModel.setModelCustomizationUUID(vfModuleCustom.getModelCustomizationUuid());
- try {
- // Map the cloud templates, attached files, and environment file
- mapCloudTemplates(
- db.getHeatTemplateByArtifactUuid(vfModuleCustom.getVfModule().getVolHeatTemplateArtifactUUId()),
- vduModel);
- mapCloudFiles(vfModuleCustom, vduModel);
- mapEnvironment(db.getHeatEnvironmentByArtifactUuid(vfModuleCustom.getVolEnvironmentArtifactUuid()),
- vduModel);
- } catch (Exception e) {
- LOGGER.debug("unhandled exception in mapVfModuleCustVolumeToVdu", e);
- throw new Exception("Exception during mapVfModuleCustVolumeToVdu " + e.getMessage());
- } finally {
- // Make sure DB session is closed
- db.close();
- }
-
- return vduModel;
- }
-
- private void mapCloudTemplates(HeatTemplate heatTemplate, VduModelInfo vduModel) throws Exception {
- // TODO: These catalog objects will be refactored to be
- // non-Heat-specific
- CatalogDatabase db = CatalogDatabase.getInstance();
- try {
- List<VduArtifact> vduArtifacts = vduModel.getArtifacts();
-
- // Main template. Also set the VDU timeout based on the main
- // template.
- vduArtifacts.add(mapHeatTemplateToVduArtifact(heatTemplate, ArtifactType.MAIN_TEMPLATE));
- vduModel.setTimeoutMinutes(heatTemplate.getTimeoutMinutes());
-
- // Nested templates
- Map<String,Object> nestedTemplates = db.getNestedTemplates(heatTemplate.getArtifactUuid());
- if (nestedTemplates != null) {
- for (String name : nestedTemplates.keySet()) {
- String body = (String) nestedTemplates.get(name);
- VduArtifact vduArtifact = new VduArtifact(name, body.getBytes(), ArtifactType.NESTED_TEMPLATE);
- vduArtifacts.add(vduArtifact);
- }
- }
-
- } catch (Exception e) {
- LOGGER.debug("unhandled exception in mapCloudTemplates", e);
- throw new Exception("Exception during mapCloudTemplates " + e.getMessage());
- } finally {
- // Make sure DB session is closed
- db.close();
- }
- }
-
- private VduArtifact mapHeatTemplateToVduArtifact(HeatTemplate heatTemplate, ArtifactType artifactType) {
- VduArtifact vduArtifact = new VduArtifact();
- vduArtifact.setName(heatTemplate.getTemplateName());
- vduArtifact.setContent(heatTemplate.getHeatTemplate().getBytes());
- vduArtifact.setType(artifactType);
- return vduArtifact;
- }
-
- private void mapCloudFiles(VfModuleCustomization vfModuleCustom, VduModelInfo vduModel) throws Exception {
- // TODO: These catalog objects will be refactored to be
- // non-Heat-specific
- CatalogDatabase db = CatalogDatabase.getInstance();
-
- try{
- Map <String, HeatFiles> heatFiles = db.getHeatFilesForVfModule(vfModuleCustom.getVfModuleModelUuid());
- if (heatFiles != null) {
- for (HeatFiles heatFile: heatFiles.values()) {
- mapCloudFileToVduArtifact(heatFile, ArtifactType.TEXT_FILE);
- }
- }
- } catch (Exception e) {
- LOGGER.debug("unhandled exception in mapCloudFiles", e);
- throw new Exception("Exception during mapCloudFiles " + e.getMessage());
- } finally {
- // Make sure DB session is closed
- db.close();
- }
-
- }
-
- private VduArtifact mapCloudFileToVduArtifact(HeatFiles heatFile, ArtifactType artifactType) {
- VduArtifact vduArtifact = new VduArtifact();
- vduArtifact.setName(heatFile.getFileName());
- vduArtifact.setContent(heatFile.getFileBody().getBytes());
- vduArtifact.setType(artifactType);
- return vduArtifact;
- }
-
- private void mapEnvironment(HeatEnvironment heatEnvironment, VduModelInfo vduModel) {
- // TODO: These catalog objects will be refactored to be
- // non-Heat-specific
- if (heatEnvironment != null) {
- List<VduArtifact> vduArtifacts = vduModel.getArtifacts();
- vduArtifacts.add(mapEnvironmentFileToVduArtifact(heatEnvironment));
- }
- }
-
- private VduArtifact mapEnvironmentFileToVduArtifact(HeatEnvironment heatEnv) {
- VduArtifact vduArtifact = new VduArtifact();
- vduArtifact.setName(heatEnv.getName());
- vduArtifact.setContent(heatEnv.getEnvironment().getBytes());
- vduArtifact.setType(ArtifactType.ENVIRONMENT);
- return vduArtifact;
- }
-
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP - SO
+ * ================================================================================
+ * Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.mso.adapters.vdu.mapper;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+import org.openecomp.mso.adapters.vdu.VduArtifact;
+import org.openecomp.mso.adapters.vdu.VduArtifact.ArtifactType;
+import org.openecomp.mso.adapters.vdu.VduModelInfo;
+import org.openecomp.mso.db.catalog.CatalogDatabase;
+import org.openecomp.mso.db.catalog.beans.HeatEnvironment;
+import org.openecomp.mso.db.catalog.beans.HeatFiles;
+import org.openecomp.mso.db.catalog.beans.HeatTemplate;
+import org.openecomp.mso.db.catalog.beans.VfModuleCustomization;
+import org.openecomp.mso.logger.MsoLogger;
+import org.springframework.stereotype.Component;
+
+@Component
+public class VfModuleCustomizationToVduMapper {
+
+ private static MsoLogger LOGGER = MsoLogger.getMsoLogger(MsoLogger.Catalog.RA);
+
+ public VduModelInfo mapVfModuleCustomizationToVdu(VfModuleCustomization vfModuleCustom) throws SQLException {
+ CatalogDatabase db = CatalogDatabase.getInstance();
+ VduModelInfo vduModel = new VduModelInfo();
+ vduModel.setModelCustomizationUUID(vfModuleCustom.getModelCustomizationUuid());
+ try {
+ // Map the cloud templates, attached files, and environment file
+ mapCloudTemplates(
+ db.getHeatTemplateByArtifactUuid(vfModuleCustom.getVfModule().getHeatTemplateArtifactUUId()),
+ vduModel);
+ mapCloudFiles(vfModuleCustom, vduModel);
+ mapEnvironment(db.getHeatEnvironmentByArtifactUuid(vfModuleCustom.getHeatEnvironmentArtifactUuid()),
+ vduModel);
+ } catch (SQLException e) {
+ LOGGER.debug("unhandled exception in mapVfModuleCustomizationToVdu", e);
+ throw new SQLException("Exception during mapVfModuleCustomizationToVdu " + e.getMessage());
+ } finally {
+ // Make sure DB session is closed
+ db.close();
+ }
+
+ return vduModel;
+ }
+
+ public VduModelInfo mapVfModuleCustVolumeToVdu(VfModuleCustomization vfModuleCustom) throws SQLException {
+ CatalogDatabase db = CatalogDatabase.getInstance();
+ VduModelInfo vduModel = new VduModelInfo();
+ vduModel.setModelCustomizationUUID(vfModuleCustom.getModelCustomizationUuid());
+ try {
+ // Map the cloud templates, attached files, and environment file
+ mapCloudTemplates(
+ db.getHeatTemplateByArtifactUuid(vfModuleCustom.getVfModule().getVolHeatTemplateArtifactUUId()),
+ vduModel);
+ mapCloudFiles(vfModuleCustom, vduModel);
+ mapEnvironment(db.getHeatEnvironmentByArtifactUuid(vfModuleCustom.getVolEnvironmentArtifactUuid()),
+ vduModel);
+ } catch (SQLException e) {
+ LOGGER.debug("unhandled exception in mapVfModuleCustVolumeToVdu", e);
+ throw new SQLException("Exception during mapVfModuleCustVolumeToVdu " + e.getMessage());
+ } finally {
+ // Make sure DB session is closed
+ db.close();
+ }
+
+ return vduModel;
+ }
+
+ private void mapCloudTemplates(HeatTemplate heatTemplate, VduModelInfo vduModel) throws SQLException {
+ // TODO: These catalog objects will be refactored to be
+ // non-Heat-specific
+ CatalogDatabase db = CatalogDatabase.getInstance();
+ try {
+ List<VduArtifact> vduArtifacts = vduModel.getArtifacts();
+
+ // Main template. Also set the VDU timeout based on the main
+ // template.
+ vduArtifacts.add(mapHeatTemplateToVduArtifact(heatTemplate, ArtifactType.MAIN_TEMPLATE));
+ vduModel.setTimeoutMinutes(heatTemplate.getTimeoutMinutes());
+
+ // Nested templates
+ Map<String,Object> nestedTemplates = db.getNestedTemplates(heatTemplate.getArtifactUuid());
+ if (nestedTemplates != null) {
+ for (String name : nestedTemplates.keySet()) {
+ String body = (String) nestedTemplates.get(name);
+ VduArtifact vduArtifact = new VduArtifact(name, body.getBytes(), ArtifactType.NESTED_TEMPLATE);
+ vduArtifacts.add(vduArtifact);
+ }
+ }
+
+ } catch (IllegalArgumentException e) {
+ LOGGER.debug("unhandled exception in mapCloudTemplates", e);
+ throw new IllegalArgumentException("Exception during mapCloudTemplates " + e.getMessage());
+ } finally {
+ // Make sure DB session is closed
+ db.close();
+ }
+ }
+
+ private VduArtifact mapHeatTemplateToVduArtifact(HeatTemplate heatTemplate, ArtifactType artifactType) {
+ VduArtifact vduArtifact = new VduArtifact();
+ vduArtifact.setName(heatTemplate.getTemplateName());
+ vduArtifact.setContent(heatTemplate.getHeatTemplate().getBytes());
+ vduArtifact.setType(artifactType);
+ return vduArtifact;
+ }
+
+ private void mapCloudFiles(VfModuleCustomization vfModuleCustom, VduModelInfo vduModel) throws SQLException {
+ // TODO: These catalog objects will be refactored to be
+ // non-Heat-specific
+ CatalogDatabase db = CatalogDatabase.getInstance();
+
+ try{
+ Map <String, HeatFiles> heatFiles = db.getHeatFilesForVfModule(vfModuleCustom.getVfModuleModelUuid());
+ if (heatFiles != null) {
+ for (HeatFiles heatFile: heatFiles.values()) {
+ mapCloudFileToVduArtifact(heatFile, ArtifactType.TEXT_FILE);
+ }
+ }
+ } catch (IllegalArgumentException e) {
+ LOGGER.debug("unhandled exception in mapCloudFiles", e);
+ throw new IllegalArgumentException("Exception during mapCloudFiles " + e.getMessage());
+ } finally {
+ // Make sure DB session is closed
+ db.close();
+ }
+
+ }
+
+ private VduArtifact mapCloudFileToVduArtifact(HeatFiles heatFile, ArtifactType artifactType) {
+ VduArtifact vduArtifact = new VduArtifact();
+ vduArtifact.setName(heatFile.getFileName());
+ vduArtifact.setContent(heatFile.getFileBody().getBytes());
+ vduArtifact.setType(artifactType);
+ return vduArtifact;
+ }
+
+ private void mapEnvironment(HeatEnvironment heatEnvironment, VduModelInfo vduModel) {
+ // TODO: These catalog objects will be refactored to be
+ // non-Heat-specific
+ if (heatEnvironment != null) {
+ List<VduArtifact> vduArtifacts = vduModel.getArtifacts();
+ vduArtifacts.add(mapEnvironmentFileToVduArtifact(heatEnvironment));
+ }
+ }
+
+ private VduArtifact mapEnvironmentFileToVduArtifact(HeatEnvironment heatEnv) {
+ VduArtifact vduArtifact = new VduArtifact();
+ vduArtifact.setName(heatEnv.getName());
+ vduArtifact.setContent(heatEnv.getEnvironment().getBytes());
+ vduArtifact.setType(ArtifactType.ENVIRONMENT);
+ return vduArtifact;
+ }
+}
\ No newline at end of file
diff --git a/mso-api-handlers/mso-api-handler-infra/src/main/java/org/openecomp/mso/apihandlerinfra/E2EServiceInstances.java b/mso-api-handlers/mso-api-handler-infra/src/main/java/org/openecomp/mso/apihandlerinfra/E2EServiceInstances.java
index e0e13e7884..01ca4df9f2 100644
--- a/mso-api-handlers/mso-api-handler-infra/src/main/java/org/openecomp/mso/apihandlerinfra/E2EServiceInstances.java
+++ b/mso-api-handlers/mso-api-handler-infra/src/main/java/org/openecomp/mso/apihandlerinfra/E2EServiceInstances.java
@@ -703,7 +703,7 @@ public class E2EServiceInstances {
 return response;
 }

-if (curStatus != null && curStatus.getResult() != null && curStatus.getResult().equalsIgnoreCase("processing")) {
+if (curStatus != null && curStatus.getResult() != null && "processing".equalsIgnoreCase(curStatus.getResult())) {
 String chkMessage = "Error: Locked instance - This " + requestScope + " (" + serviceId + ") "
 + "now being worked with a status of " + curStatus.getResult()
 + ". The latest workflow of instance must be finished or cleaned up.";
diff --git a/packages/docker/src/main/docker/docker-files/Dockerfile.mso-chef-final b/packages/docker/src/main/docker/docker-files/Dockerfile.mso-chef-final
index 4b7bbaf776..771949fdfd 100644
--- a/packages/docker/src/main/docker/docker-files/Dockerfile.mso-chef-final
+++ b/packages/docker/src/main/docker/docker-files/Dockerfile.mso-chef-final
@@ -15,13 +15,67 @@ ENV https_proxy=$HTTPS_PROXY
 ENV CHEF_REPO_NAME="chef-repo"
 ENV CHEF_CONFIG_NAME="mso-config"

-### Downloading dependencies
+USER root

+### Downloading dependencies
+# Install specific system libraries to fix CVE vulnerabilities
+RUN echo "deb http://archive.ubuntu.com/ubuntu/ artful main restricted" >> /etc/apt/sources.list && \
+ echo "deb http://security.ubuntu.com/ubuntu/ artful-security main restricted" >> /etc/apt/sources.list && \
+ echo "deb http://archive.ubuntu.com/ubuntu/ bionic main restricted" >> /etc/apt/sources.list && \
+ apt-get -y update
+
+# krb5 1.16-2build1
+# For CVE-2017-15088 CVE-2017-11462
+# libvorbis 1.3.5-4ubuntu0.2
+# For CVE-2017-14632 CVE-2017-14160
+# libx11 2:1.6.4-3
+# For CVE-2016-7943 CVE-2016-7942
+# libxtst 1.2.3-1
+# For CVE-2016-7951
+# ncurses 6.1-1ubuntu1
+# For CVE-2017-10685 CVE-2017-10684
+# libsqlite3-0 3.22.0-1
+# For CVE-2017-10989
+# libtiff5 4.0.8-5ubuntu0.1
+# For CVE-2017-9117 CVE-2016-9540 CVE-2016-9539 CVE-2016-9538 CVE-2016-9537 CVE-2016-9536 CVE-2016-9535 CVE-2016-9534 CVE-2016-9533 CVE-2015-8668 CVE-2015-7554 CVE-2016-6223 CVE-2017-5563 CVE-2016-3621 CVE-2016-8331
+# shadow 1:4.5-1ubuntu1
+# For CVE-2017-12424
+# perl-base 5.26.0-8ubuntu1.1
+# For CVE-2015-8608 CVE-2017-12883
+# openssl 1.1.0g-2ubuntu3
+# For CVE-2016-6303 CVE-2016-2182 CVE-2016-2177 CVE-2016-2176
+# zlib1g 1:1.2.11.dfsg-0ubuntu2
+# For CVE-2016-9843 CVE-2016-9841 CVE-2016-9842 CVE-2016-9840
+# libexpat1 2.2.5-3
+# For CVE-2016-0718 CVE-2016-4472
+# libc-bin libc6 2.26-0ubuntu2.1
+# For CVE-2018-6485
+# openssl 1.1.0g-2ubuntu3
+# For CVE-2016-6303 CVE-2016-2182 CVE-2016-2177
+# libpcre3 2:8.39-5ubuntu3
+# For CVE-2016-3191 CVE-2016-1283
 USER root
-RUN apt-get install -y netcat curl && curl -LO https://packages.chef.io/stable/ubuntu/12.04/chefdk_0.17.17-1_amd64.deb && curl -LO http://central.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/1.5.4/mariadb-java-client-1.5.4.jar && apt-get remove --purge -y curl && apt-get autoremove -y
+RUN apt-get -y install \
+ libkrb5-3=1.16-2build1 krb5-locales=1.16-2build1 \
+ libvorbis0a=1.3.5-4ubuntu0.2 \
+ libx11-6=2:1.6.4-3 libx11-data=2:1.6.4-3 libx11-doc=2:1.6.4-3 libx11-xcb1=2:1.6.4-3 \
+ libxtst6=2:1.2.3-1 \
+ ncurses-base=6.1-1ubuntu1 ncurses-bin=6.1-1ubuntu1 libncurses5=6.1-1ubuntu1 libncursesw5=6.1-1ubuntu1 \
+ libsqlite3-0=3.22.0-1 \
+ libtiff5=4.0.8-5ubuntu0.1 \
+ passwd=1:4.5-1ubuntu1 \
+ perl-base=5.26.0-8ubuntu1.1 \
+ zlib1g=1:1.2.11.dfsg-0ubuntu2 \
+ libexpat1=2.2.5-3 \
+ libc-bin=2.26-0ubuntu2.1 libc6=2.26-0ubuntu2.1 \
+ openssl=1.1.0g-2ubuntu3 \
+ libpcre3=2:8.39-5ubuntu3
+
+RUN apt-get install -y netcat curl && curl -LO https://packages.chef.io/files/stable/chefdk/2.5.3/ubuntu/16.04/chefdk_2.5.3-1_amd64.deb && curl -LO http://central.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/1.5.4/mariadb-java-client-1.5.4.jar && apt-get remove --purge -y curl && apt-get autoremove -y

 ### Install Chef
-RUN dpkg -i chefdk_0.17.17-1_amd64.deb
+#RUN dpkg -i chefdk_0.17.17-1_amd64.deb
+RUN dpkg -i chefdk_2.5.3-1_amd64.deb

 COPY scripts/start-jboss-server.sh /opt/mso/scripts/start-jboss-server.sh
@@ -97,4 +151,3 @@ VOLUME /shared
 ### Start EAP
 USER root
 CMD ["/opt/mso/scripts/start-jboss-server.sh"]
-
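
Note on the E2EServiceInstances hunk above: the rewritten condition keeps the null guards and puts the string literal first in equalsIgnoreCase, so a null operation result can never raise a NullPointerException. The sketch below is illustrative only and not part of the commit; the OperationStatus class here is a hypothetical stand-in that merely mimics the catalog bean's getResult() accessor.

// Illustrative sketch only (not part of this commit); OperationStatus is a
// hypothetical stand-in mimicking the catalog bean's getResult() accessor.
public class LiteralFirstCompareSketch {

    static class OperationStatus {
        private final String result;
        OperationStatus(String result) { this.result = result; }
        String getResult() { return result; }
    }

    // Literal-first equalsIgnoreCase: safe even when getResult() returns null.
    static boolean isProcessing(OperationStatus curStatus) {
        return curStatus != null && "processing".equalsIgnoreCase(curStatus.getResult());
    }

    public static void main(String[] args) {
        System.out.println(isProcessing(new OperationStatus("PROCESSING"))); // true
        System.out.println(isProcessing(new OperationStatus(null)));         // false, no NPE
        System.out.println(isProcessing(null));                              // false
    }
}

Keeping the explicit curStatus null check preserves the original short-circuit order, while the literal-first comparison makes the separate getResult() null check redundant rather than required.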